Compare commits

..

58 Commits

SHA1 Message CI (continuous-integration/drone/tag) Date
a505186051 Remove Drone 2024-03-04 13:38:57 +00:00
8fcca8571a Add Gitea Actions 2024-03-04 13:38:52 +00:00
757f153042 4.39.2 (CI passing) 2024-02-23 12:00:07 +00:00
a9f65bed60 4.39.1 (CI passing) 2024-02-20 12:00:07 +00:00
a8ca607581 4.38.3 (CI passing) 2024-02-14 12:00:07 +00:00
5b47bd1654 4.38.2 (CI passing) 2024-02-06 12:00:07 +00:00
e9faf93878 4.38.0 (CI passing) 2024-02-03 16:55:23 +00:00
0f60f7cec9 4.37.2 (CI passing) 2024-01-27 12:00:07 +00:00
3180034ff8 4.37.1 (CI passing) 2024-01-25 12:00:08 +00:00
b3ee67213d 4.37.0 (CI passing) 2024-01-18 12:00:07 +00:00
aeb34f8582 4.36.8 (CI passing) 2023-12-28 12:00:07 +00:00
2372b8f50f 4.36.7 (CI passing) 2023-12-21 12:00:09 +00:00
f3050b2ca0 4.36.6 (CI passing) 2023-12-17 14:56:57 +00:00
ebe941c8a5 4.36.5 (CI passing) 2023-11-30 12:00:09 +00:00
651b2dd52a 4.36.4 2023-11-22 12:00:09 +00:00
1c580cb6f7 4.36.3 2023-11-08 12:00:06 +00:00
21765ae9d8 4.35.6 2023-11-07 12:00:06 +00:00
d661a52f43 4.35.3 (CI passing) 2023-10-05 12:00:06 +01:00
45528ff81d 4.35.2 (CI passing) 2023-10-03 12:00:06 +01:00
6170fbf127 4.35.1 (CI passing) 2023-10-02 12:00:06 +01:00
c8ab76066b 4.35.0 (CI passing) 2023-09-29 12:00:06 +01:00
357d34a42b 4.34.4 (CI passing) 2023-08-31 12:00:06 +01:00
246754872d 4.34.3 (CI passing) 2023-08-29 20:20:00 +01:00
df59d73d66 4.34.2 (CI passing) 2023-08-25 12:00:05 +01:00
ff6d78f255 4.34.1 (CI passing) 2023-08-09 12:00:05 +01:00
d59fa5fe1c Update .drone.yml 2023-08-06 17:56:31 +00:00
de1fe02200 4.33.3 (CI passing) 2023-08-06 17:51:04 +01:00
439bfc5efd Update README.md 2023-08-06 16:04:57 +00:00
0a8a420850 Update README.md 2023-08-06 16:04:47 +00:00
d119e74c2f Update README.md 2023-08-06 16:04:41 +00:00
b5485429ef Remove provenance [CI SKIP] 2023-08-06 16:01:04 +00:00
f3a7900cbd 4.32.4 (CI passing) 2023-08-02 16:49:54 +01:00
0f91161ff3 4.32.1 (CI passing) 2023-07-12 11:00:04 +00:00
1da9a3f828 4.32.0 (CI passing) 2023-07-11 11:00:05 +00:00
167e56bc95 4.31.0 (CI failing) 2023-06-30 11:00:06 +00:00
c5a0d2d513 4.30.1 (CI failing) 2023-06-28 11:00:03 +00:00
25ebbaa7fd 4.30.0 (CI failing) 2023-06-27 11:00:04 +00:00
067d94841e 4.29.4 (CI passing) 2023-06-07 11:00:05 +00:00
804eec0c03 4.29.3 (CI passing) 2023-06-01 11:00:05 +00:00
651f3f1e9c 4.28.2 (CI passing) 2023-05-16 11:00:09 +00:00
fd988d6ef0 4.28.1 (CI passing) 2023-05-10 11:00:05 +00:00
da4a8cc979 4.27.0 (CI passing) 2023-04-25 11:00:05 +00:00
299da46abe 4.26.1 (CI passing) 2023-04-20 11:00:06 +00:00
1ad8294ec3 4.25.1 (CI passing) 2023-04-15 11:00:05 +00:00
f5de4a9624 4.24.0 (CI passing) 2023-04-11 11:00:05 +00:00
5501b033e8 4.23.0 (CI passing) 2023-03-24 12:00:07 +00:00
32a4b865ef 4.22.5 (CI passing) 2023-03-14 12:00:06 +00:00
5d5a23dd63 Update '.drone.yml' 2023-03-08 18:32:53 +00:00
03053d0e54 4.22.4 (CI running) 2023-03-08 12:00:06 +00:00
4d70590d05 4.22.3 (CI passing) 2023-03-01 12:00:06 +00:00
bc879c10ae 4.22.2 (CI passing) 2023-02-16 12:00:05 +00:00
c6b237a004 Update 'README.md' 2023-02-10 13:00:46 +00:00
722979fe19 Update 'README.md' 2023-01-27 16:29:12 +00:00
b63ada023d Update '.drone.yml' 2023-01-27 16:26:22 +00:00
8b4e4e3a2b 4.22.0 (CI passing) 2023-01-17 12:00:04 +00:00
32465d1220 4.21.3 (CI passing) 2022-12-30 16:47:07 +00:00
98bae4c86c 4.21.3 2022-12-30 16:23:27 +00:00
7ff6cf2451 add drone 2022-12-30 15:35:10 +00:00
263 changed files with 4198 additions and 10741 deletions

View File

@@ -5,7 +5,7 @@ on:
- '*'
env:
CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login
CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login-dev
TEA_VERSION: 0.9.2
jobs:

View File

@@ -14,4 +14,4 @@ venv/
.venv
.coverage
htmlcov
.git/
.git/

View File

@@ -1,6 +1,7 @@
name: Test and lint
on: [push, pull_request]
on:
push:
jobs:
lint:
@@ -109,7 +110,7 @@ jobs:
GITHUB_ACTIONS_TEST: true
- name: Archive code coverage results
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: code-coverage-report
path: htmlcov
@@ -138,12 +139,6 @@ jobs:
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Create Sentry release
uses: getsentry/action-release@v1
env:
@@ -163,7 +158,6 @@ jobs:
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: ${{ steps.meta.outputs.tags }}

app/.gitignore vendored
View File

@@ -11,7 +11,8 @@ db.sqlite-journal
static/upload
venv/
.venv
.python-version
.coverage
htmlcov
adhoc
.env.*
.env.*

View File

@@ -8,7 +8,7 @@ repos:
- id: check-yaml
- id: trailing-whitespace
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.34.1
rev: v1.3.0
hooks:
- id: djlint-jinja
files: '.*\.html'
@@ -21,4 +21,5 @@ repos:
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
- id: ruff-format

View File

@@ -20,15 +20,15 @@ SimpleLogin backend consists of 2 main components:
## Install dependencies
The project requires:
- Python 3.10 and poetry to manage dependencies
- Python 3.7+ and [poetry](https://python-poetry.org/) to manage dependencies
- Node v10 for front-end.
- Postgres 13+
- Postgres 12+
First, install all dependencies by running the following command.
Feel free to use `virtualenv` or similar tools to isolate development environment.
```bash
poetry sync
poetry install
```
On Mac, sometimes you might need to install some other packages via `brew`:
@@ -68,12 +68,6 @@ For most tests, you will need to have ``redis`` installed and started on your ma
sh scripts/run-test.sh
```
You can also run tests using a local Postgres DB to speed things up. This can be done by
- creating an empty test DB and running the database migration by `dropdb test && createdb test && DB_URI=postgresql://localhost:5432/test alembic upgrade head`
- replacing the `DB_URI` in `test.env` file by `DB_URI=postgresql://localhost:5432/test`
## Run the code locally
Install npm packages
@@ -157,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
## Pull request
The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
The code is formatted using https://github.com/psf/black, to format the code, simply run
```
poetry run ruff format .
poetry run black .
```
The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by
@@ -223,31 +217,6 @@ Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you sho
## Job runner
Some features require a job handler (such as GDPR data export). To test such feature you need to run the job_runner
```bash
python job_runner.py
```
# Setup for Mac
There are several ways to set up Python and manage the project dependencies on Mac. For reference, we have successfully used this setup on an Apple silicon Mac:
```bash
# we haven't managed to make python 3.12 work
brew install python3.10
# make sure to update the PATH so python, pip point to Python3
# for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile
# Although pipx is the recommended way to install poetry,
# installing pipx via brew will automatically install python 3.12
# and poetry will then use python 3.12
# so we recommend using poetry this way instead
curl -sSL https://install.python-poetry.org | python3 -
poetry install
# activate the virtualenv and you should be good to go!
source .venv/bin/activate
```

View File

@@ -541,7 +541,7 @@ exit
Once you've created all your desired login accounts, add these lines to `/simplelogin.env` to disable further registrations:
```.env
```
DISABLE_REGISTRATION=1
DISABLE_ONBOARDING=true
```

View File

@@ -1,10 +1,7 @@
from __future__ import annotations
from typing import Optional
import arrow
import sqlalchemy
from flask_admin import BaseView
from flask_admin.form import SecureForm
from flask_admin.model.template import EndpointLinkRowAction
from markupsafe import Markup
@@ -30,28 +27,10 @@ from app.models import (
Alias,
Newsletter,
PADDLE_SUBSCRIPTION_GRACE_DAYS,
Mailbox,
DeletedAlias,
DomainDeletedAlias,
PartnerUser,
AliasMailbox,
)
from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_date_formatter(view, context, model, name):
return model.created_at.format()
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
class SLModelView(sqla.ModelView):
column_default_sort = ("id", True)
column_display_pk = True
@@ -67,8 +46,7 @@ class SLModelView(sqla.ModelView):
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
return redirect(url_for("auth.login", next=request.url))
def on_model_change(self, form, model, is_created):
changes = {}
@@ -116,8 +94,11 @@ class SLAdminIndexView(AdminIndexView):
return redirect("/admin/user")
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
class UserAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["email", "id"]
column_exclude_list = [
"salt",
@ -136,8 +117,6 @@ class UserAdmin(SLModelView):
column_formatters = {
"upgrade_channel": _user_upgrade_channel_formatter,
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action(
@@ -364,29 +343,17 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
class EmailLogAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id"]
column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"]
can_edit = False
can_create = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class AliasAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "email", "mailbox.email"]
column_filters = ["id", "user.email", "email", "mailbox.email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action(
"disable_email_spoofing_check",
"Disable email spoofing protection",
@@ -409,15 +376,9 @@ class AliasAdmin(SLModelView):
class MailboxAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "email"]
column_filters = ["id", "user.email", "email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
# class LifetimeCouponAdmin(SLModelView):
# can_edit = True
@@ -425,26 +386,14 @@ class MailboxAdmin(SLModelView):
class CouponAdmin(SLModelView):
form_base_class = SecureForm
can_edit = False
can_create = True
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class ManualSubscriptionAdmin(SLModelView):
form_base_class = SecureForm
can_edit = True
column_searchable_list = ["id", "user.email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action(
"extend_1y",
"Extend for 1 year",
@@ -483,27 +432,15 @@ class ManualSubscriptionAdmin(SLModelView):
class CustomDomainAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["domain", "user.email", "user.id"]
column_exclude_list = ["ownership_txt_token"]
can_edit = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class ReferralAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "code", "name"]
column_filters = ["id", "user.email", "code", "name"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
def scaffold_list_columns(self):
ret = super().scaffold_list_columns()
ret.insert(0, "nb_user")
@@ -519,8 +456,16 @@ class ReferralAdmin(SLModelView):
# can_delete = True
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_created_at_formatter(view, context, model, name):
return model.created_at.format()
class AdminAuditLogAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"]
column_filters = ["admin.id", "admin.email", "model_id", "created_at"]
column_exclude_list = ["id"]
@@ -531,8 +476,7 @@ class AdminAuditLogAdmin(SLModelView):
column_formatters = {
"action": _admin_action_formatter,
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
"created_at": _admin_created_at_formatter,
}
@@ -552,7 +496,6 @@ def _transactionalcomplaint_refused_email_id_formatter(view, context, model, nam
class ProviderComplaintAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.id", "created_at"]
column_filters = ["user.id", "state"]
column_hide_backrefs = False
@@ -561,8 +504,8 @@ class ProviderComplaintAdmin(SLModelView):
can_delete = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
"created_at": _admin_created_at_formatter,
"updated_at": _admin_created_at_formatter,
"state": _transactionalcomplaint_state_formatter,
"phase": _transactionalcomplaint_phase_formatter,
"refused_email": _transactionalcomplaint_refused_email_id_formatter,
@@ -623,7 +566,6 @@ def _newsletter_html_formatter(view, context, model: Newsletter, name):
class NewsletterAdmin(SLModelView):
form_base_class = SecureForm
list_template = "admin/model/newsletter-list.html"
edit_template = "admin/model/newsletter-edit.html"
edit_modal = False
@@ -705,7 +647,6 @@ class NewsletterAdmin(SLModelView):
class NewsletterUserAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id"]
column_filters = ["id", "user.email", "newsletter.subject"]
column_exclude_list = ["created_at", "updated_at", "id"]
@@ -715,131 +656,17 @@ class NewsletterUserAdmin(SLModelView):
class DailyMetricAdmin(SLModelView):
form_base_class = SecureForm
column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True
class MetricAdmin(SLModelView):
form_base_class = SecureForm
column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True
class InvalidMailboxDomainAdmin(SLModelView):
form_base_class = SecureForm
can_create = True
can_delete = True
class EmailSearchResult:
no_match: bool = True
alias: Optional[Alias] = None
mailbox: list[Mailbox] = []
mailbox_count: int = 0
deleted_alias: Optional[DeletedAlias] = None
deleted_custom_alias: Optional[DomainDeletedAlias] = None
user: Optional[User] = None
@staticmethod
def from_email(email: str) -> EmailSearchResult:
output = EmailSearchResult()
alias = Alias.get_by(email=email)
if alias:
output.alias = alias
output.no_match = False
user = User.get_by(email=email)
if user:
output.user = user
output.no_match = False
mailboxes = (
Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
)
if mailboxes:
output.mailbox = mailboxes
output.mailbox_count = Mailbox.filter_by(email=email).count()
output.no_match = False
deleted_alias = DeletedAlias.get_by(email=email)
if deleted_alias:
output.deleted_alias = deleted_alias
output.no_match = False
domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
if domain_deleted_alias:
output.domain_deleted_alias = domain_deleted_alias
output.no_match = False
return output
class EmailSearchHelpers:
@staticmethod
def mailbox_list(user: User) -> list[Mailbox]:
return (
Mailbox.filter_by(user_id=user.id)
.order_by(Mailbox.id.asc())
.limit(10)
.all()
)
@staticmethod
def mailbox_count(user: User) -> int:
return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()
@staticmethod
def alias_mailboxes(alias: Alias) -> list[Mailbox]:
return (
Session.query(Mailbox)
.filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
.union(
Session.query(Mailbox)
.join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
.filter(AliasMailbox.alias_id == alias.id)
)
.order_by(Mailbox.id)
.limit(10)
.all()
)
@staticmethod
def alias_mailbox_count(alias: Alias) -> int:
return len(alias.mailboxes)
@staticmethod
def alias_list(user: User) -> list[Alias]:
return (
Alias.filter_by(user_id=user.id).order_by(Alias.id.desc()).limit(10).all()
)
@staticmethod
def alias_count(user: User) -> int:
return Alias.filter_by(user_id=user.id).count()
@staticmethod
def partner_user(user: User) -> Optional[PartnerUser]:
return PartnerUser.get_by(user_id=user.id)
class EmailSearchAdmin(BaseView):
def is_accessible(self):
return current_user.is_authenticated and current_user.is_admin
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
@expose("/", methods=["GET", "POST"])
def index(self):
search = EmailSearchResult()
email = request.args.get("email")
if email is not None and len(email) > 0:
email = email.strip()
search = EmailSearchResult.from_email(email)
return self.render(
"admin/email_search.html",
email=email,
data=search,
helper=EmailSearchHelpers,
)
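The email-search view added above funnels a single address through every lookup at once (alias, user, mailboxes, global and domain trash). A rough usage sketch of the result object, assuming a Flask app context and the imports shown in this diff:

```python
# Rough usage sketch of the EmailSearchResult helper added in this diff.
search = EmailSearchResult.from_email("jane@example.com")
if search.no_match:
    print("nothing found for this address")
else:
    print(search.user)           # User owning this address, if any
    print(search.alias)          # Alias with this exact email, if any
    print(search.mailbox_count)  # total number of matching mailboxes
```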

View File

@@ -64,12 +64,8 @@ def verify_prefix_suffix(
# SimpleLogin domain case:
# 1) alias_suffix must start with "." and
# 2) alias_domain_prefix must come from the word list
available_sl_domains = [
sl_domain.domain
for sl_domain in user.get_sl_domains(alias_options=alias_options)
]
if (
alias_domain in available_sl_domains
alias_domain in user.available_sl_domains(alias_options=alias_options)
and alias_domain not in user_custom_domains
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
and not config.DISABLE_ALIAS_SUFFIX
@@ -84,7 +80,9 @@
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False
if alias_domain not in available_sl_domains:
if alias_domain not in user.available_sl_domains(
alias_options=alias_options
):
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False
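This hunk precomputes the user's allowed SimpleLogin domains once instead of calling the accessor twice. A minimal sketch of the resulting shape, with the suffix and word-list checks elided and names following the hunk:

```python
# Minimal sketch of the refactored flow above; names follow the hunk, checks elided.
def suffix_domain_is_allowed(user, alias_domain, user_custom_domains, alias_options) -> bool:
    available_sl_domains = [
        sl_domain.domain
        for sl_domain in user.get_sl_domains(alias_options=alias_options)
    ]
    if alias_domain in available_sl_domains and alias_domain not in user_custom_domains:
        pass  # SimpleLogin-domain case: suffix and word-list checks go here
    if alias_domain not in available_sl_domains:
        return False  # wrong alias suffix
    return True
```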

View File

@@ -1,7 +1,6 @@
import csv
from io import StringIO
import re
from dataclasses import dataclass
from typing import Optional, Tuple
from email_validator import validate_email, EmailNotValidError
@@ -24,20 +23,11 @@ from app.email_utils import (
send_cannot_create_domain_alias,
send_email,
render,
sl_formataddr,
)
from app.errors import AliasInTrashError
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import (
AliasDeleted,
AliasStatusChanged,
EventContent,
AliasCreated,
)
from app.log import LOG
from app.models import (
Alias,
AliasDeleteReason,
CustomDomain,
Directory,
User,
@@ -66,16 +56,12 @@ def get_user_if_alias_would_auto_create(
# Prevent addresses with unicode characters (🤯) in them for now.
validate_email(address, check_deliverability=False, allow_smtputf8=False)
except EmailNotValidError:
LOG.i(f"Not creating alias for {address} because email is invalid")
return None
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
address, notify_user=notify_user
)
if DomainDeletedAlias.get_by(email=address):
LOG.i(
f"Not creating alias for {address} because it was previously deleted for this domain"
)
return None
if domain_and_rule:
return domain_and_rule[0].user
@@ -100,9 +86,6 @@
custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain)
if not custom_domain:
LOG.i(
f"Cannot auto-create custom domain alias for {address} because there's no custom domain for {alias_domain}"
)
return None
user: User = custom_domain.user
@@ -118,9 +101,6 @@
if not custom_domain.catch_all:
if len(custom_domain.auto_create_rules) == 0:
LOG.i(
f"Cannot create alias {address} for domain {custom_domain} because it has no catch-all and no rules"
)
return None
local = get_email_local_part(address)
@@ -134,7 +114,7 @@
)
return custom_domain, rule
else: # no rule passes
LOG.d(f"No rule matches auto-create {address} for domain {custom_domain}")
LOG.d("no rule passed to create %s", local)
return None
LOG.d("Create alias via catchall")
@@ -161,7 +141,6 @@ def check_if_alias_can_be_auto_created_for_a_directory(
sep = "#"
else:
# if there's no directory separator in the alias, no way to auto-create it
LOG.info(f"Cannot auto-create {address} since it has no directory separator")
return None
directory_name = address[: address.find(sep)]
@@ -169,9 +148,6 @@
directory = Directory.get_by(name=directory_name)
if not directory:
LOG.info(
f"Cannot auto-create {address} because there is no directory for {directory_name}"
)
return None
user: User = directory.user
@@ -180,17 +156,12 @@
return None
if not user.can_create_new_alias():
LOG.d(
f"{user} can't create new directory alias {address} because user cannot create aliases"
)
LOG.d(f"{user} can't create new directory alias {address}")
if notify_user:
send_cannot_create_directory_alias(user, address, directory_name)
return None
if directory.disabled:
LOG.d(
f"{user} can't create new directory alias {address} bcause directory is disabled"
)
if notify_user:
send_cannot_create_directory_alias_disabled(user, address, directory_name)
return None
@@ -332,52 +303,36 @@ def try_auto_create_via_domain(address: str) -> Optional[Alias]:
return None
def delete_alias(
alias: Alias,
user: User,
reason: AliasDeleteReason = AliasDeleteReason.Unspecified,
commit: bool = False,
):
def delete_alias(alias: Alias, user: User):
"""
Delete an alias and add it to either global or domain trash
Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
"""
LOG.i(f"User {user} has deleted alias {alias}")
# save deleted alias to either global or domain tra
# save deleted alias to either global or domain trash
if alias.custom_domain_id:
if not DomainDeletedAlias.get_by(
email=alias.email, domain_id=alias.custom_domain_id
):
domain_deleted_alias = DomainDeletedAlias(
user_id=user.id,
email=alias.email,
domain_id=alias.custom_domain_id,
reason=reason,
LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
Session.add(
DomainDeletedAlias(
user_id=user.id,
email=alias.email,
domain_id=alias.custom_domain_id,
)
)
Session.add(domain_deleted_alias)
Session.commit()
LOG.i(
f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
)
else:
if not DeletedAlias.get_by(email=alias.email):
deleted_alias = DeletedAlias(email=alias.email, reason=reason)
Session.add(deleted_alias)
LOG.d("add %s to global trash", alias)
Session.add(DeletedAlias(email=alias.email))
Session.commit()
LOG.i(f"Moving {alias} to global trash {deleted_alias}")
alias_id = alias.id
alias_email = alias.email
LOG.i("delete alias %s", alias)
Alias.filter(Alias.id == alias.id).delete()
Session.commit()
EventDispatcher.send_event(
user,
EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
)
if commit:
Session.commit()
def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
"""
@@ -487,12 +442,10 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
user=old_user,
alias=alias,
),
render(
"transactional/alias-transferred.html",
user=old_user,
alias=alias,
),
)
@@ -504,69 +457,4 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
alias.disable_pgp = False
alias.pinned = False
EventDispatcher.send_event(
old_user,
EventContent(
alias_deleted=AliasDeleted(
id=alias.id,
email=alias.email,
)
),
)
EventDispatcher.send_event(
new_user,
EventContent(
alias_created=AliasCreated(
id=alias.id,
email=alias.email,
note=alias.note,
enabled=alias.enabled,
created_at=int(alias.created_at.timestamp),
)
),
)
Session.commit()
def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
LOG.i(f"Changing alias {alias} enabled to {enabled}")
alias.enabled = enabled
event = AliasStatusChanged(
id=alias.id,
email=alias.email,
enabled=enabled,
created_at=int(alias.created_at.timestamp),
)
EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
if commit:
Session.commit()
@dataclass
class AliasRecipientName:
name: str
message: Optional[str] = None
def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
"""
Logic:
1. If alias has name, use it
2. If alias has custom domain, and custom domain has name, use it
3. Otherwise, use the alias email as the recipient
"""
if alias.name:
return AliasRecipientName(
name=sl_formataddr((alias.name, alias.email)),
message=f"Put alias name {alias.name} in from header",
)
elif alias.custom_domain:
if alias.custom_domain.name:
return AliasRecipientName(
name=sl_formataddr((alias.custom_domain.name, alias.email)),
message=f"Put domain default alias name {alias.custom_domain.name} in from header",
)
return AliasRecipientName(name=alias.email)
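Two helpers from this file are worth calling out: the new delete_alias signature takes an explicit AliasDeleteReason plus an optional commit flag, and change_alias_status wraps the enabled flip so the AliasStatusChanged event is always dispatched. A hedged usage sketch, with the alias objects assumed to exist:

```python
# Hedged usage sketch of the helpers shown above; alias objects are assumed to exist.
from app.alias_utils import change_alias_status, delete_alias
from app.models import AliasDeleteReason

delete_alias(alias, user, reason=AliasDeleteReason.ManualAction, commit=True)
change_alias_status(other_alias, enabled=False, commit=True)
```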

View File

@@ -19,9 +19,6 @@ def authorize_request() -> Optional[Tuple[str, int]]:
if not api_key:
if current_user.is_authenticated:
# if current_user.is_authenticated and request.headers.get(
# constants.HEADER_ALLOW_API_COOKIES
# ):
g.user = current_user
else:
return jsonify(error="Wrong api key"), 401

View File

@@ -25,8 +25,7 @@ from app.errors import (
ErrAddressInvalid,
)
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasMailbox, AliasDeleteReason
from app.models import Alias, Contact, Mailbox, AliasMailbox
@deprecated
@@ -161,7 +160,7 @@ def delete_alias(alias_id):
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_utils.delete_alias(alias, user, AliasDeleteReason.ManualAction)
alias_utils.delete_alias(alias, user)
return jsonify(deleted=True), 200
@@ -185,8 +184,7 @@ def toggle_alias(alias_id):
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_utils.change_alias_status(alias, enabled=not alias.enabled)
LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
alias.enabled = not alias.enabled
Session.commit()
return jsonify(enabled=alias.enabled), 200
@@ -424,7 +422,7 @@ def create_contact_route(alias_id):
contact_address = data.get("contact")
try:
contact = create_contact(alias, contact_address)
contact = create_contact(g.user, alias, contact_address)
except ErrContactErrorUpgradeNeeded as err:
return jsonify(error=err.error_for_user()), 403
except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:

View File

@@ -11,7 +11,7 @@ from itsdangerous import Signer
from app import email_utils
from app.api.base import api_bp
from app.config import FLASK_SECRET, DISABLE_REGISTRATION
from app.dashboard.views.account_setting import send_reset_password_email
from app.dashboard.views.setting import send_reset_password_email
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
@@ -52,12 +52,8 @@ def auth_login():
password = data.get("password")
device = data.get("device")
email = data.get("email")
if not email:
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
return jsonify(error="Email or password incorrect"), 400
email = sanitize_email(email)
canonical_email = canonicalize_email(email)
email = sanitize_email(data.get("email"))
canonical_email = canonicalize_email(data.get("email"))
user = User.get_by(email=email) or User.get_by(email=canonical_email)
@@ -133,8 +129,8 @@ def auth_register():
send_email(
email,
"Just one more step to join SimpleLogin",
render("transactional/code-activation.txt.jinja2", user=user, code=code),
render("transactional/code-activation.html", user=user, code=code),
render("transactional/code-activation.txt.jinja2", code=code),
render("transactional/code-activation.html", code=code),
)
RegisterEvent(RegisterEvent.ActionType.success, RegisterEvent.Source.api).send()
@@ -230,8 +226,8 @@ def auth_reactivate():
send_email(
email,
"Just one more step to join SimpleLogin",
render("transactional/code-activation.txt.jinja2", user=user, code=code),
render("transactional/code-activation.html", user=user, code=code),
render("transactional/code-activation.txt.jinja2", code=code),
render("transactional/code-activation.html", code=code),
)
return jsonify(msg="User needs to confirm their account"), 200

View File

@@ -1,18 +1,22 @@
from smtplib import SMTPRecipientsRefused
import arrow
from flask import g
from flask import jsonify
from flask import request
from app import mailbox_utils
from app.api.base import api_bp, require_api_auth
from app.config import JOB_DELETE_MAILBOX
from app.dashboard.views.mailbox import send_verification_email
from app.dashboard.views.mailbox_detail import verify_mailbox_change
from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
)
from app.models import Mailbox
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email
@@ -40,15 +44,31 @@ def create_mailbox():
user = g.user
mailbox_email = sanitize_email(request.get_json().get("email"))
try:
new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
except mailbox_utils.MailboxError as e:
return jsonify(error=e.msg), 400
if not user.is_premium():
return jsonify(error="Only premium plan can add additional mailbox"), 400
return (
jsonify(mailbox_to_dict(new_mailbox)),
201,
)
if not is_valid_email(mailbox_email):
return jsonify(error=f"{mailbox_email} invalid"), 400
elif mailbox_already_used(mailbox_email, user):
return jsonify(error=f"{mailbox_email} already used"), 400
elif not email_can_be_used_as_mailbox(mailbox_email):
return (
jsonify(
error=f"{mailbox_email} cannot be used. Please note a mailbox cannot "
f"be a disposable email address"
),
400,
)
else:
new_mailbox = Mailbox.create(email=mailbox_email, user_id=user.id)
Session.commit()
send_verification_email(user, new_mailbox)
return (
jsonify(mailbox_to_dict(new_mailbox)),
201,
)
@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["DELETE"])
@@ -66,17 +86,47 @@ def delete_mailbox(mailbox_id):
"""
user = g.user
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
return jsonify(error="Forbidden"), 403
if mailbox.id == user.default_mailbox_id:
return jsonify(error="You cannot delete the default mailbox"), 400
data = request.get_json() or {}
transfer_mailbox_id = data.get("transfer_aliases_to")
if transfer_mailbox_id and int(transfer_mailbox_id) >= 0:
transfer_mailbox_id = int(transfer_mailbox_id)
else:
transfer_mailbox_id = None
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
try:
mailbox_utils.delete_mailbox(user, mailbox_id, transfer_mailbox_id)
except mailbox_utils.MailboxError as e:
return jsonify(error=e.msg), 400
if not transfer_mailbox or transfer_mailbox.user_id != user.id:
return (
jsonify(error="You must transfer the aliases to a mailbox you own."),
403,
)
if transfer_mailbox_id == mailbox_id:
return (
jsonify(
error="You can not transfer the aliases to the mailbox you want to delete."
),
400,
)
if not transfer_mailbox.verified:
return jsonify(error="Your new mailbox is not verified"), 400
# Schedule delete account job
LOG.w("schedule delete mailbox job for %s", mailbox)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id,
},
run_at=arrow.now(),
commit=True,
)
return jsonify(deleted=True), 200
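On the wire, the mailbox deletion endpoint accepts an optional transfer_aliases_to id and (in the new version) delegates all validation to mailbox_utils. A hypothetical client call, where the base URL, ids, and API key are placeholders; SimpleLogin's API reads the key from the Authentication header:

```python
# Hypothetical client sketch for the endpoint above; URL, ids, and key are placeholders.
import requests

resp = requests.delete(
    "https://app.simplelogin.io/api/mailboxes/42",
    headers={"Authentication": "<api-key>"},
    json={"transfer_aliases_to": 7},  # pass -1 (or omit) to delete without a transfer
)
print(resp.status_code, resp.json())
```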

View File

@@ -10,7 +10,6 @@ from app.api.base import api_bp, require_api_auth
from app.config import SESSION_COOKIE_NAME
from app.dashboard.views.index import get_stats
from app.db import Session
from app.image_validation import detect_image_format, ImageFormat
from app.models import ApiKey, File, PartnerUser, User
from app.proton.utils import get_proton_partner
from app.session import logout_session
@@ -79,18 +78,17 @@ def update_user_info():
data = request.get_json() or {}
if "profile_picture" in data:
if user.profile_picture_id:
file = user.profile_picture
user.profile_picture_id = None
Session.flush()
if file:
File.delete(file.id)
s3.delete(file.path)
if data["profile_picture"] is None:
if user.profile_picture_id:
file = user.profile_picture
user.profile_picture_id = None
Session.flush()
if data["profile_picture"] is not None:
if file:
File.delete(file.id)
s3.delete(file.path)
Session.flush()
else:
raw_data = base64.decodebytes(data["profile_picture"].encode())
if detect_image_format(raw_data) == ImageFormat.Unknown:
return jsonify(error="Unsupported image format"), 400
file_path = random_string(30)
file = File.create(user_id=user.id, path=file_path)
Session.flush()
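The rewritten profile-picture branch deletes any existing file first, then either stops (explicit null) or validates the decoded bytes with detect_image_format before storing. A hypothetical request against this route; the path and header names are assumptions based on the handler above:

```python
# Hypothetical sketch: update the profile picture via the handler above.
import base64
import requests

with open("avatar.png", "rb") as f:
    payload = {"profile_picture": base64.b64encode(f.read()).decode()}

resp = requests.patch(
    "https://app.simplelogin.io/api/user_info",  # path assumed from the view name
    headers={"Authentication": "<api-key>"},
    json=payload,  # send {"profile_picture": None} to remove the picture instead
)
```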

View File

@@ -16,7 +16,6 @@ from .views import (
social,
recovery,
api_to_cookie,
oidc,
)
__all__ = [
@@ -37,5 +36,4 @@ __all__ = [
"social",
"recovery",
"api_to_cookie",
"oidc",
]

View File

@@ -3,13 +3,10 @@ from flask_login import login_user
from app.auth.base import auth_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import EmailChange, ResetPasswordCode
@auth_bp.route("/change_email", methods=["GET", "POST"])
@limiter.limit("3/hour")
def change_email():
code = request.args.get("code")
@@ -25,14 +22,12 @@ def change_email():
return render_template("auth/change_email.html")
user = email_change.user
old_email = user.email
user.email = email_change.new_email
EmailChange.delete(email_change.id)
ResetPasswordCode.filter_by(user_id=user.id).delete()
Session.commit()
LOG.i(f"User {user} has changed their email from {old_email} to {user.email}")
flash("Your new email has been updated", "success")
login_user(user)

View File

@@ -3,7 +3,7 @@ from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.dashboard.views.account_setting import send_reset_password_email
from app.dashboard.views.setting import send_reset_password_email
from app.extensions import limiter
from app.log import LOG
from app.models import User

View File

@@ -7,7 +7,7 @@ from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
from app.db import Session
from app.log import LOG
from app.models import User, File, SocialAuth
from app.utils import random_string, sanitize_email, sanitize_next_url
from app.utils import random_string, sanitize_email
from .login_utils import after_login
_authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
@@ -29,7 +29,7 @@ def google_login():
# to avoid flask-login displaying the login error message
session.pop("_flashes", None)
next_url = sanitize_next_url(request.args.get("next"))
next_url = request.args.get("next")
# Google does not allow to append param to redirect_url
# we need to pass the next url by session

View File

@@ -5,7 +5,7 @@ from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON, OIDC_CLIENT_ID
from app.config import CONNECT_WITH_PROTON
from app.events.auth_event import LoginEvent
from app.extensions import limiter
from app.log import LOG
@@ -77,6 +77,4 @@ def login():
next_url=next_url,
show_resend_activation=show_resend_activation,
connect_with_proton=CONNECT_WITH_PROTON,
connect_with_oidc=OIDC_CLIENT_ID is not None,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)

View File

@@ -1,135 +0,0 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session
import requests
from app import config
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import (
URL,
OIDC_SCOPES,
OIDC_NAME_FIELD,
)
from app.db import Session
from app.email_utils import send_welcome_email
from app.log import LOG
from app.models import User, SocialAuth
from app.utils import sanitize_email, sanitize_next_url
# need to set explicitly redirect_uri instead of leaving the lib to pre-fill redirect_uri
# when served behind nginx, the redirect_uri is localhost... and not the real url
redirect_uri = URL + "/auth/oidc/callback"
SESSION_STATE_KEY = "oauth_state"
SESSION_NEXT_KEY = "oauth_redirect_next"
@auth_bp.route("/oidc/login")
def oidc_login():
if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
return redirect(url_for("auth.login"))
next_url = sanitize_next_url(request.args.get("next"))
auth_url = requests.get(config.OIDC_WELL_KNOWN_URL).json()["authorization_endpoint"]
oidc = OAuth2Session(
config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri
)
authorization_url, state = oidc.authorization_url(auth_url)
# State is used to prevent CSRF, keep this for later.
session[SESSION_STATE_KEY] = state
session[SESSION_NEXT_KEY] = next_url
return redirect(authorization_url)
@auth_bp.route("/oidc/callback")
def oidc_callback():
if SESSION_STATE_KEY not in session:
flash("Invalid state, please retry", "error")
return redirect(url_for("auth.login"))
if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
return redirect(url_for("auth.login"))
# user clicks on cancel
if "error" in request.args:
flash("Please use another sign in method then", "warning")
return redirect("/")
oidc_configuration = requests.get(config.OIDC_WELL_KNOWN_URL).json()
user_info_url = oidc_configuration["userinfo_endpoint"]
token_url = oidc_configuration["token_endpoint"]
oidc = OAuth2Session(
config.OIDC_CLIENT_ID,
state=session[SESSION_STATE_KEY],
scope=[OIDC_SCOPES],
redirect_uri=redirect_uri,
)
oidc.fetch_token(
token_url,
client_secret=config.OIDC_CLIENT_SECRET,
authorization_response=request.url,
)
oidc_user_data = oidc.get(user_info_url)
if oidc_user_data.status_code != 200:
LOG.e(
f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}"
)
flash(
"Cannot get user data from OIDC, please use another way to login/sign up",
"error",
)
return redirect(url_for("auth.login"))
oidc_user_data = oidc_user_data.json()
email = oidc_user_data.get("email")
if not email:
LOG.e(f"cannot get email for OIDC user {oidc_user_data} {email}")
flash(
"Cannot get a valid email from OIDC, please another way to login/sign up",
"error",
)
return redirect(url_for("auth.login"))
email = sanitize_email(email)
user = User.get_by(email=email)
if not user and config.DISABLE_REGISTRATION:
flash(
"Sorry you cannot sign up via the OIDC provider. Please sign-up first with your email.",
"error",
)
return redirect(url_for("auth.register"))
elif not user:
user = create_user(email, oidc_user_data)
if not SocialAuth.get_by(user_id=user.id, social="oidc"):
SocialAuth.create(user_id=user.id, social="oidc")
Session.commit()
# The activation link contains the original page, for ex authorize page
next_url = session[SESSION_NEXT_KEY]
session[SESSION_NEXT_KEY] = None
return after_login(user, next_url)
def create_user(email, oidc_user_data):
new_user = User.create(
email=email,
name=oidc_user_data.get(OIDC_NAME_FIELD),
password="",
activated=True,
)
LOG.i(f"Created new user for login request from OIDC. New user {new_user.id}")
Session.commit()
send_welcome_email(new_user)
return new_user
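The removed OIDC flow above is configured entirely through environment variables; the corresponding app/config.py hunk appears later in this diff. A sketch of the settings it expects, with placeholder values:

```python
# Placeholder values for the OIDC settings the flow above reads from app.config.
OIDC_WELL_KNOWN_URL = "https://issuer.example/.well-known/openid-configuration"
OIDC_CLIENT_ID = "simplelogin"
OIDC_CLIENT_SECRET = "change-me"
OIDC_SCOPES = "openid profile email"
OIDC_NAME_FIELD = "name"  # claim used for the display name; "name" is the default
```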

View File

@@ -6,7 +6,7 @@ from wtforms import StringField, validators
from app import email_utils, config
from app.auth.base import auth_bp
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON
from app.config import CONNECT_WITH_PROTON
from app.auth.views.login_utils import get_referral
from app.config import URL, HCAPTCHA_SECRET, HCAPTCHA_SITEKEY
from app.db import Session
@@ -109,14 +109,11 @@ def register():
next_url=next_url,
HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
connect_with_proton=CONNECT_WITH_PROTON,
connect_with_oidc=config.OIDC_CLIENT_ID is not None,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)
def send_activation_email(user, next_url):
# the activation code is valid for 1h and delete all previous codes
Session.query(ActivationCode).filter(ActivationCode.user_id == user.id).delete()
# the activation code is valid for 1h
activation = ActivationCode.create(user_id=user.id, code=random_string(30))
Session.commit()
@@ -126,4 +123,4 @@ def send_activation_email(user, next_url):
LOG.d("redirect user to %s after activation", next_url)
activation_link = activation_link + "&next=" + encode_url(next_url)
email_utils.send_activation_email(user, activation_link)
email_utils.send_activation_email(user.email, activation_link)

View File

@@ -3,7 +3,7 @@ import random
import socket
import string
from ast import literal_eval
from typing import Callable, List, Optional
from typing import Callable, List
from urllib.parse import urlparse
from dotenv import load_dotenv
@@ -35,33 +35,6 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
return literal_eval(value)
def get_env_dict(env_var: str) -> dict[str, str]:
"""
Get an env variable and convert it into a python dictionary with keys and values as strings.
Args:
env_var (str): env var, example: SL_DB
Syntax is: key1=value1;key2=value2
Components separated by ;
key and value separated by =
"""
value = os.getenv(env_var)
if not value:
return {}
components = value.split(";")
result = {}
for component in components:
if component == "":
continue
parts = component.split("=")
if len(parts) != 2:
raise Exception(f"Invalid config for env var {env_var}")
result[parts[0].strip()] = parts[1].strip()
return result
config_file = os.environ.get("CONFIG")
if config_file:
config_file = get_abs_path(config_file)
@@ -147,7 +120,7 @@ if POSTFIX_SUBMISSION_TLS:
else:
default_postfix_port = 25
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", default_postfix_port))
POSTFIX_TIMEOUT = int(os.environ.get("POSTFIX_TIMEOUT", 3))
POSTFIX_TIMEOUT = os.environ.get("POSTFIX_TIMEOUT", 3)
# ["domain1.com", "domain2.com"]
OTHER_ALIAS_DOMAINS = sl_getenv("OTHER_ALIAS_DOMAINS", list)
@@ -261,7 +234,7 @@ else:
print("WARNING: Use a temp directory for GNUPGHOME", GNUPGHOME)
# Github, Google, Facebook, OIDC client id and secrets
# Github, Google, Facebook client id and secrets
GITHUB_CLIENT_ID = os.environ.get("GITHUB_CLIENT_ID")
GITHUB_CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")
@@ -271,13 +244,6 @@ GOOGLE_CLIENT_SECRET = os.environ.get("GOOGLE_CLIENT_SECRET")
FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")
CONNECT_WITH_OIDC_ICON = os.environ.get("CONNECT_WITH_OIDC_ICON")
OIDC_WELL_KNOWN_URL = os.environ.get("OIDC_WELL_KNOWN_URL")
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID")
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET")
OIDC_SCOPES = os.environ.get("OIDC_SCOPES")
OIDC_NAME_FIELD = os.environ.get("OIDC_NAME_FIELD", "name")
PROTON_CLIENT_ID = os.environ.get("PROTON_CLIENT_ID")
PROTON_CLIENT_SECRET = os.environ.get("PROTON_CLIENT_SECRET")
PROTON_BASE_URL = os.environ.get(
@@ -308,7 +274,6 @@ JOB_DELETE_MAILBOX = "delete-mailbox"
JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
# for pagination
PAGE_LIMIT = 20
@@ -456,11 +421,6 @@ try:
except Exception:
HIBP_SCAN_INTERVAL_DAYS = 7
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
HIBP_MAX_ALIAS_CHECK = 10_000
HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
KEEP_OLD_DATA_DAYS = 30
POSTMASTER = os.environ.get("POSTMASTER")
@@ -607,59 +567,3 @@ MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ
EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
# We want it disabled by default, so only skip if defined
EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ
def read_webhook_enabled_user_ids() -> Optional[List[int]]:
user_ids = os.environ.get("EVENT_WEBHOOK_ENABLED_USER_IDS", None)
if user_ids is None:
return None
ids = []
for user_id in user_ids.split(","):
try:
ids.append(int(user_id.strip()))
except ValueError:
pass
return ids
EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_ids()
# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
# It defaults to the regular DB_URI in case it's needed
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)
def read_partner_dict(var: str) -> dict[int, str]:
partner_value = get_env_dict(var)
if len(partner_value) == 0:
return {}
res: dict[int, str] = {}
for partner_id in partner_value.keys():
try:
partner_id_int = int(partner_id.strip())
res[partner_id_int] = partner_value[partner_id]
except ValueError:
pass
return res
PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
"PARTNER_DNS_CUSTOM_DOMAINS"
)
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
"PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
)
MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
"MAILBOX_VERIFICATION_OVERRIDE_CODE", None
)
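get_env_dict parses key=value pairs separated by semicolons, and read_partner_dict converts the keys to ints for the partner settings above. A small example with hypothetical values:

```python
# Hypothetical example of the env-dict syntax documented in get_env_dict above.
import os

os.environ["PARTNER_DNS_CUSTOM_DOMAINS"] = "1=partner1.example;2=partner2.example"
assert get_env_dict("PARTNER_DNS_CUSTOM_DOMAINS") == {
    "1": "partner1.example",
    "2": "partner2.example",
}
assert read_partner_dict("PARTNER_DNS_CUSTOM_DOMAINS") == {
    1: "partner1.example",
    2: "partner2.example",
}
```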

View File

@@ -1,2 +0,0 @@
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"

View File

@@ -1,113 +0,0 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from sqlalchemy.exc import IntegrityError
from app.db import Session
from app.email_utils import generate_reply_email, parse_full_address
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Contact, Alias
from app.utils import sanitize_email
class ContactCreateError(Enum):
InvalidEmail = "Invalid email"
NotAllowed = "Your plan does not allow to create contacts"
@dataclass
class ContactCreateResult:
contact: Optional[Contact]
created: bool
error: Optional[ContactCreateError]
def __update_contact_if_needed(
contact: Contact, name: Optional[str], mail_from: Optional[str]
) -> ContactCreateResult:
if name and contact.name != name:
LOG.d(f"Setting {contact} name to {name}")
contact.name = name
Session.commit()
if mail_from and contact.mail_from is None:
LOG.d(f"Setting {contact} mail_from to {mail_from}")
contact.mail_from = mail_from
Session.commit()
return ContactCreateResult(contact, created=False, error=None)
def create_contact(
email: str,
alias: Alias,
name: Optional[str] = None,
mail_from: Optional[str] = None,
allow_empty_email: bool = False,
automatic_created: bool = False,
from_partner: bool = False,
) -> ContactCreateResult:
# If user cannot create contacts, they still need to be created when receiving an email for an alias
if not automatic_created and not alias.user.can_create_contacts():
return ContactCreateResult(
None, created=False, error=ContactCreateError.NotAllowed
)
# Parse emails with form 'name <email>'
try:
email_name, email = parse_full_address(email)
except ValueError:
email = ""
email_name = ""
# If no name is explicitly given try to get it from the parsed email
if name is None:
name = email_name[: Contact.MAX_NAME_LENGTH]
else:
name = name[: Contact.MAX_NAME_LENGTH]
# If still no name is there, make sure the name is None instead of empty string
if not name:
name = None
if name is not None and "\x00" in name:
LOG.w("Cannot use contact name because has \\x00")
name = ""
# Sanitize the email; if it's not valid, only create the contact when explicitly allowed. Otherwise fail
email = sanitize_email(email, not_lower=True)
if not is_valid_email(email):
LOG.w(f"invalid contact email {email}")
if not allow_empty_email:
return ContactCreateResult(
None, created=False, error=ContactCreateError.InvalidEmail
)
LOG.d("Create a contact with invalid email for %s", alias)
# either reuse a contact with empty email or create a new contact with empty email
email = ""
# If contact exists, update name and mail_from if needed
contact = Contact.get_by(alias_id=alias.id, website_email=email)
if contact is not None:
return __update_contact_if_needed(contact, name, mail_from)
# Create the contact
reply_email = generate_reply_email(email, alias)
try:
flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=email,
name=name,
reply_email=reply_email,
mail_from=mail_from,
automatic_created=automatic_created,
flags=flags,
invalid_email=email == "",
commit=True,
)
LOG.d(
f"Created contact {contact} for alias {alias} with email {email} invalid_email={contact.invalid_email}"
)
except IntegrityError:
Session.rollback()
LOG.info(
f"Contact with email {email} for alias_id {alias.id} already existed, fetching from DB"
)
contact = Contact.get_by(alias_id=alias.id, website_email=email)
return __update_contact_if_needed(contact, name, mail_from)
return ContactCreateResult(contact, created=True, error=None)
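create_contact parses "Name <email>" forms, truncates names to Contact.MAX_NAME_LENGTH, and returns created=False when an existing row is reused. A hedged usage sketch, with the alias assumed to exist:

```python
# Hedged usage sketch of create_contact as defined above; alias is assumed to exist.
result = create_contact("Jane Doe <jane@example.com>", alias)
if result.error is ContactCreateError.NotAllowed:
    print("this plan cannot create contacts")
elif result.error is ContactCreateError.InvalidEmail:
    print("contact address failed validation")
else:
    print(result.contact.reply_email, "created" if result.created else "reused")
```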

View File

@@ -1,194 +0,0 @@
import arrow
import re
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional
from app.config import JOB_DELETE_DOMAIN
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
_MAX_MAILBOXES_PER_DOMAIN = 20
@dataclass
class CreateCustomDomainResult:
message: str = ""
message_category: str = ""
success: bool = False
instance: Optional[CustomDomain] = None
redirect: Optional[str] = None
class CannotUseDomainReason(Enum):
InvalidDomain = 1
BuiltinDomain = 2
DomainAlreadyUsed = 3
DomainPartOfUserEmail = 4
DomainUserInMailbox = 5
def message(self, domain: str) -> str:
if self == CannotUseDomainReason.InvalidDomain:
return "This is not a valid domain"
elif self == CannotUseDomainReason.BuiltinDomain:
return "A custom domain cannot be a built-in domain."
elif self == CannotUseDomainReason.DomainAlreadyUsed:
return f"{domain} already used"
elif self == CannotUseDomainReason.DomainPartOfUserEmail:
return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
elif self == CannotUseDomainReason.DomainUserInMailbox:
return f"{domain} already used in a SimpleLogin mailbox"
else:
raise Exception("Invalid CannotUseDomainReason")
class CannotSetCustomDomainMailboxesCause(Enum):
InvalidMailbox = "Something went wrong, please retry"
NoMailboxes = "You must select at least 1 mailbox"
TooManyMailboxes = (
f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
)
@dataclass
class SetCustomDomainMailboxesResult:
success: bool
reason: Optional[CannotSetCustomDomainMailboxesCause] = None
def is_valid_domain(domain: str) -> bool:
"""
Checks that a domain is valid according to RFC 1035
"""
if len(domain) > 255:
return False
if domain.endswith("."):
domain = domain[:-1] # Strip the trailing dot
labels = domain.split(".")
if not labels:
return False
for label in labels:
if not _ALLOWED_DOMAIN_REGEX.match(label):
return False
return True
def sanitize_domain(domain: str) -> str:
new_domain = domain.lower().strip()
if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]
if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]
return new_domain
def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
if not is_valid_domain(domain):
return CannotUseDomainReason.InvalidDomain
elif SLDomain.get_by(domain=domain):
return CannotUseDomainReason.BuiltinDomain
elif CustomDomain.get_by(domain=domain):
return CannotUseDomainReason.DomainAlreadyUsed
elif get_email_domain_part(user.email) == domain:
return CannotUseDomainReason.DomainPartOfUserEmail
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
).first():
return CannotUseDomainReason.DomainUserInMailbox
else:
return None
def create_custom_domain(
user: User, domain: str, partner_id: Optional[int] = None
) -> CreateCustomDomainResult:
if not user.is_premium():
return CreateCustomDomainResult(
message="Only premium plan can add custom domain",
message_category="warning",
)
new_domain = sanitize_domain(domain)
domain_forbidden_cause = can_domain_be_used(user, new_domain)
if domain_forbidden_cause:
return CreateCustomDomainResult(
message=domain_forbidden_cause.message(new_domain), message_category="error"
)
new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)
# new domain has ownership verified if its parent has the ownership verified
for root_cd in user.custom_domains:
if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True
# Add the partner_id in case it's passed
if partner_id is not None:
new_custom_domain.partner_id = partner_id
Session.commit()
return CreateCustomDomainResult(
success=True,
instance=new_custom_domain,
)
def delete_custom_domain(domain: CustomDomain):
# Schedule delete domain job
LOG.w("schedule delete domain job for %s", domain)
domain.pending_deletion = True
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": domain.id},
run_at=arrow.now(),
commit=True,
)
def set_custom_domain_mailboxes(
user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
) -> SetCustomDomainMailboxesResult:
if len(mailbox_ids) == 0:
return SetCustomDomainMailboxesResult(
success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
)
elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
return SetCustomDomainMailboxesResult(
success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
)
mailboxes = (
Session.query(Mailbox)
.filter(
Mailbox.id.in_(mailbox_ids),
Mailbox.user_id == user_id,
Mailbox.verified == True, # noqa: E712
)
.all()
)
if len(mailboxes) != len(mailbox_ids):
return SetCustomDomainMailboxesResult(
success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
Session.commit()
return SetCustomDomainMailboxesResult(success=True)

View File

@ -1,201 +1,37 @@
from dataclasses import dataclass
from typing import List, Optional
from app import config
from app.constants import DMARC_RECORD
from app.db import Session
from app.dns_utils import (
MxRecord,
DNSClient,
is_mx_equivalent,
get_network_dns_client,
)
from app.dns_utils import get_cname_record
from app.models import CustomDomain
from app.utils import random_string
@dataclass
class DomainValidationResult:
success: bool
errors: List[str]
class CustomDomainValidation:
def __init__(
self,
dkim_domain: str,
dns_client: DNSClient = get_network_dns_client(),
partner_domains: Optional[dict[int, str]] = None,
partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
):
def __init__(self, dkim_domain: str):
self.dkim_domain = dkim_domain
self._dns_client = dns_client
self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
self._partner_domain_validation_prefixes = (
partner_domains_validation_prefixes
or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
)
def get_ownership_verification_record(self, domain: CustomDomain) -> str:
prefix = "sl"
if (
domain.partner_id is not None
and domain.partner_id in self._partner_domain_validation_prefixes
):
prefix = self._partner_domain_validation_prefixes[domain.partner_id]
if not domain.ownership_txt_token:
domain.ownership_txt_token = random_string(30)
Session.commit()
return f"{prefix}-verification={domain.ownership_txt_token}"
def get_expected_mx_records(self, domain: CustomDomain) -> list[MxRecord]:
records = []
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
partner_domain = self._partner_domains[domain.partner_id]  # avoid shadowing the CustomDomain parameter
records.append(MxRecord(10, f"mx1.{partner_domain}."))
records.append(MxRecord(20, f"mx2.{partner_domain}."))
else:
# Default ones
for priority, mx_domain in config.EMAIL_SERVERS_WITH_PRIORITY:
records.append(MxRecord(priority, mx_domain))
return records
def get_expected_spf_domain(self, domain: CustomDomain) -> str:
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
return self._partner_domains[domain.partner_id]
else:
return config.EMAIL_DOMAIN
def get_expected_spf_record(self, domain: CustomDomain) -> str:
spf_domain = self.get_expected_spf_domain(domain)
return f"v=spf1 include:{spf_domain} ~all"
def get_dkim_records(self, domain: CustomDomain) -> dict[str, str]:
"""
Get the dkim records to set up. Depending on whether the custom domain comes from a partner or not,
it returns either the default records or the partner-specific ones.
"""
# By default use the default domain
dkim_domain = self.dkim_domain
if domain.partner_id is not None:
# Domain is from a partner. Retrieve the partner config and use its domain if it exists
dkim_domain = self._partner_domains.get(domain.partner_id, dkim_domain)
return {
f"{key}._domainkey": f"{key}._domainkey.{dkim_domain}"
self._dkim_records = {
(f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
for key in ("dkim", "dkim02", "dkim03")
}
def get_dkim_records(self) -> set[tuple[str, str]]:
"""
Get a list of dkim records to set up. They are computed once in the constructor and are the same for every domain.
"""
return self._dkim_records
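For a non-partner domain, the new per-domain get_dkim_records(domain) above returns a mapping like the following sketch, assuming dkim_domain = "simplelogin.co" (an illustrative value):

expected_dkim_cnames = {
    "dkim._domainkey": "dkim._domainkey.simplelogin.co",
    "dkim02._domainkey": "dkim02._domainkey.simplelogin.co",
    "dkim03._domainkey": "dkim03._domainkey.simplelogin.co",
}
# Each key is the CNAME host to create on the custom domain; each value is the
# target that CNAME must point to.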
def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
"""
Check if dkim records are properly set for this custom domain.
Returns an empty dict if all records are OK. Otherwise, returns the records that aren't properly configured
"""
correct_records = {}
invalid_records = {}
expected_records = self.get_dkim_records(custom_domain)
for prefix, expected_record in expected_records.items():
for prefix, expected_record in self.get_dkim_records():
custom_record = f"{prefix}.{custom_domain.domain}"
dkim_record = self._dns_client.get_cname_record(custom_record)
if dkim_record == expected_record:
correct_records[prefix] = custom_record
else:
dkim_record = get_cname_record(custom_record)
if dkim_record != expected_record:
invalid_records[custom_record] = dkim_record or "empty"
# HACK
# As we initially had only one dkim record, we want users who had just the original dkim record and
# the domain validated to continue seeing it as validated (while still showing them the missing records).
# However, if not even the original dkim record is right, then even if the domain was dkim_verified in the past,
# we will remove the dkim_verified flag.
# This is done to give users with the old dkim config (a single record) time to update their CNAMEs.
# HACK: If dkim is enabled, don't disable it, to give users time to update their CNAMEs
if custom_domain.dkim_verified:
# Check if at least the original dkim is there
if correct_records.get("dkim._domainkey") is not None:
# Original dkim record is there. Return the missing records (if any) and don't clear the flag
return invalid_records
# Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
# rest of the code path, returning the invalid records and clearing the flag
return invalid_records
custom_domain.dkim_verified = len(invalid_records) == 0
Session.commit()
return invalid_records
def validate_domain_ownership(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
"""
Check if the custom_domain has added the ownership verification records
"""
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
expected_verification_record = self.get_ownership_verification_record(
custom_domain
)
if expected_verification_record in txt_records:
custom_domain.ownership_verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
return DomainValidationResult(success=False, errors=txt_records)
def validate_mx_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
expected_mx_records = self.get_expected_mx_records(custom_domain)
if not is_mx_equivalent(mx_domains, expected_mx_records):
return DomainValidationResult(
success=False,
errors=[f"{record.priority} {record.domain}" for record in mx_domains],
)
else:
custom_domain.verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
def validate_spf_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
expected_spf_domain = self.get_expected_spf_domain(custom_domain)
if expected_spf_domain in spf_domains:
custom_domain.spf_verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
custom_domain.spf_verified = False
Session.commit()
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
return DomainValidationResult(
success=False,
errors=cleaned_records,
)
def validate_dmarc_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
if DMARC_RECORD in txt_records:
custom_domain.dmarc_verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
custom_domain.dmarc_verified = False
Session.commit()
return DomainValidationResult(success=False, errors=txt_records)
def __clean_spf_records(
self, txt_records: List[str], custom_domain: CustomDomain
) -> List[str]:
final_records = []
verification_record = self.get_ownership_verification_record(custom_domain)
for record in txt_records:
if record != verification_record:
final_records.append(record)
return final_records

View File

@ -32,7 +32,6 @@ from .views import (
delete_account,
notification,
support,
account_setting,
)
__all__ = [
@ -69,5 +68,4 @@ __all__ = [
"delete_account",
"notification",
"support",
"account_setting",
]

View File

@ -1,242 +0,0 @@
import arrow
from flask import (
render_template,
request,
redirect,
url_for,
flash,
)
from flask_login import login_required, current_user
from app import email_utils
from app.config import (
URL,
FIRST_ALIAS_DOMAIN,
ALIAS_RANDOM_SUFFIX_LENGTH,
CONNECT_WITH_PROTON,
)
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.dashboard.views.mailbox_detail import ChangeEmailForm
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
)
from app.extensions import limiter
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import (
BlockBehaviourEnum,
PlanEnum,
ResetPasswordCode,
EmailChange,
User,
Alias,
AliasGeneratorEnum,
SenderFormatEnum,
UnsubscribeBehaviourEnum,
)
from app.proton.utils import perform_proton_account_unlink
from app.utils import (
random_string,
CSRFValidationForm,
canonicalize_email,
)
@dashboard_bp.route("/account_setting", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("5/minute", methods=["POST"])
def account_setting():
change_email_form = ChangeEmailForm()
csrf_form = CSRFValidationForm()
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
pending_email = email_change.new_email
else:
pending_email = None
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-email":
if change_email_form.validate():
# whether user can proceed with the email update
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:
# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
new_email_valid = False
elif not email_can_be_used_as_mailbox(new_email):
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
# a pending email change with the same email exists from another user
elif EmailChange.get_by(new_email=new_email):
other_email_change: EmailChange = EmailChange.get_by(
new_email=new_email
)
LOG.w(
"Another user has a pending %s with the same email address. Current user:%s",
other_email_change,
current_user,
)
if other_email_change.is_expired():
LOG.d(
"delete the expired email change %s", other_email_change
)
EmailChange.delete(other_email_change.id)
Session.commit()
else:
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
if new_email_valid:
email_change = EmailChange.create(
user_id=current_user.id,
code=random_string(
60
), # todo: make sure the code is unique
new_email=new_email,
)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash(
"A confirmation email is on the way, please check your inbox",
"success",
)
return redirect(url_for("dashboard.account_setting"))
elif request.form.get("form-name") == "change-password":
flash(
"You are going to receive an email containing instructions to change your password",
"success",
)
send_reset_password_email(current_user)
return redirect(url_for("dashboard.account_setting"))
elif request.form.get("form-name") == "send-full-user-report":
if ExportUserDataJob(current_user).store_job_in_db():
flash(
"You will receive your SimpleLogin data via email shortly",
"success",
)
else:
flash("An export of your data is currently in progress", "error")
partner_sub = None
partner_name = None
return render_template(
"dashboard/account_setting.html",
csrf_form=csrf_form,
PlanEnum=PlanEnum,
SenderFormatEnum=SenderFormatEnum,
BlockBehaviourEnum=BlockBehaviourEnum,
change_email_form=change_email_form,
pending_email=pending_email,
AliasGeneratorEnum=AliasGeneratorEnum,
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
partner_sub=partner_sub,
partner_name=partner_name,
FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
connect_with_proton=CONNECT_WITH_PROTON,
)
def send_reset_password_email(user):
"""
generate a new ResetPasswordCode and send it over email to user
"""
# the reset password code is valid for 1h
reset_password_code = ResetPasswordCode.create(
user_id=user.id, code=random_string(60)
)
Session.commit()
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
email_utils.send_reset_password_email(user, reset_password_link)
def send_change_email_confirmation(user: User, email_change: EmailChange):
"""
send confirmation email to the new email address
"""
link = f"{URL}/auth/change_email?code={email_change.code}"
email_utils.send_change_email(user, email_change.new_email, link)
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
@login_required
@sudo_required
def resend_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
# extend email change expiration
email_change.expired = arrow.now().shift(hours=12)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash("A confirmation email is on the way, please check your inbox", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
@sudo_required
def cancel_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
EmailChange.delete(email_change.id)
Session.commit()
flash("Your email change is cancelled", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
@login_required
@sudo_required
def unlink_proton_account():
csrf_form = CSRFValidationForm()
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
perform_proton_account_unlink(current_user)
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))

View File

@ -9,10 +9,13 @@ from sqlalchemy import and_, func, case
from wtforms import StringField, validators, ValidationError
# Need to import directly from config to allow modification from the tests
from app import config, parallel_limiter, contact_utils
from app.contact_utils import ContactCreateError
from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
generate_reply_email,
parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
@ -21,8 +24,8 @@ from app.errors import (
ErrContactAlreadyExists,
)
from app.log import LOG
from app.models import Alias, Contact, EmailLog
from app.utils import CSRFValidationForm
from app.models import Alias, Contact, EmailLog, User
from app.utils import sanitize_email, CSRFValidationForm
def email_validator():
@ -48,7 +51,7 @@ def email_validator():
return _check
def create_contact(alias: Alias, contact_address: str) -> Contact:
def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
"""
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
Can throw exceptions:
@ -58,23 +61,37 @@ def create_contact(alias: Alias, contact_address: str) -> Contact:
"""
if not contact_address:
raise ErrAddressInvalid("Empty address")
output = contact_utils.create_contact(email=contact_address, alias=alias)
if output.error == ContactCreateError.InvalidEmail:
try:
contact_name, contact_email = parse_full_address(contact_address)
except ValueError:
raise ErrAddressInvalid(contact_address)
elif output.error == ContactCreateError.NotAllowed:
raise ErrContactErrorUpgradeNeeded()
elif output.error is not None:
raise ErrAddressInvalid("Invalid address")
elif not output.created:
raise ErrContactAlreadyExists(output.contact)
contact = output.contact
contact_email = sanitize_email(contact_email)
if not is_valid_email(contact_email):
raise ErrAddressInvalid(contact_email)
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
if contact:
raise ErrContactAlreadyExists(contact)
if not user.can_create_contacts():
raise ErrContactErrorUpgradeNeeded()
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
reply_email=generate_reply_email(contact_email, alias),
)
LOG.d(
"create reverse-alias for %s %s, reverse alias:%s",
contact_address,
alias,
contact.reply_email,
)
Session.commit()
return contact
@ -244,7 +261,7 @@ def alias_contact_manager(alias_id):
if new_contact_form.validate():
contact_address = new_contact_form.email.data.strip()
try:
contact = create_contact(alias, contact_address)
contact = create_contact(current_user, alias, contact_address)
except (
ErrContactErrorUpgradeNeeded,
ErrAddressInvalid,

View File

@ -1,13 +1,9 @@
from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
from app.dashboard.views.enter_sudo import sudo_required
from app.extensions import limiter
@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
@sudo_required
@limiter.limit("2/minute")
def alias_export_route():
return alias_export_csv(current_user)

View File

@ -5,9 +5,7 @@ from flask_login import login_required, current_user
from app import s3
from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import File, BatchImport, Job
from app.utils import random_string, CSRFValidationForm
@ -15,8 +13,6 @@ from app.utils import random_string, CSRFValidationForm
@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/minute", methods=["POST"])
def batch_import_route():
# only for users who have custom domains
if not current_user.verified_custom_domains():
@ -41,7 +37,7 @@ def batch_import_route():
return redirect(request.url)
if len(batch_imports) > 10:
flash(
"You have too many imports already. Please wait until some get cleaned up",
"You have too many imports already. Wait until some get cleaned up",
"error",
)
return render_template(

View File

@ -5,9 +5,11 @@ from wtforms import StringField, validators
from app import parallel_limiter
from app.config import EMAIL_SERVERS_WITH_PRIORITY
from app.custom_domain_utils import create_custom_domain
from app.dashboard.base import dashboard_bp
from app.models import CustomDomain
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
class NewCustomDomainForm(FlaskForm):
@ -21,12 +23,13 @@ class NewCustomDomainForm(FlaskForm):
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
def custom_domain():
custom_domains = CustomDomain.filter_by(
user_id=current_user.id,
is_sl_subdomain=False,
pending_deletion=False,
user_id=current_user.id, is_sl_subdomain=False
).all()
mailboxes = current_user.mailboxes()
new_custom_domain_form = NewCustomDomainForm()
errors = {}
if request.method == "POST":
if request.form.get("form-name") == "create":
if not current_user.is_premium():
@ -34,25 +37,87 @@ def custom_domain():
return redirect(url_for("dashboard.custom_domain"))
if new_custom_domain_form.validate():
res = create_custom_domain(
user=current_user, domain=new_custom_domain_form.domain.data
)
if res.success:
flash(f"New domain {res.instance.domain} is created", "success")
new_domain = new_custom_domain_form.domain.data.lower().strip()
if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]
if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]
if SLDomain.get_by(domain=new_domain):
flash("A custom domain cannot be a built-in domain.", "error")
elif CustomDomain.get_by(domain=new_domain):
flash(f"{new_domain} already used", "error")
elif get_email_domain_part(current_user.email) == new_domain:
flash(
"You cannot add a domain that you are currently using for your personal email. "
"Please change your personal email to your real email",
"error",
)
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
).first():
flash(
f"{new_domain} already used in a SimpleLogin mailbox", "error"
)
else:
new_custom_domain = CustomDomain.create(
domain=new_domain, user_id=current_user.id
)
# the new domain inherits ownership verification if its parent domain is already ownership-verified
for root_cd in current_user.custom_domains:
if (
new_domain.endswith("." + root_cd.domain)
and root_cd.ownership_verified
):
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True
Session.commit()
mailbox_ids = request.form.getlist("mailbox_ids")
if mailbox_ids:
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(url_for("dashboard.custom_domain"))
mailboxes.append(mailbox)
for mailbox in mailboxes:
DomainMailbox.create(
domain_id=new_custom_domain.id, mailbox_id=mailbox.id
)
Session.commit()
flash(
f"New domain {new_custom_domain.domain} is created", "success"
)
return redirect(
url_for(
"dashboard.domain_detail_dns",
custom_domain_id=res.instance.id,
custom_domain_id=new_custom_domain.id,
)
)
else:
flash(res.message, res.message_category)
if res.redirect:
return redirect(url_for(res.redirect))
return render_template(
"dashboard/custom_domain.html",
custom_domains=custom_domains,
new_custom_domain_form=new_custom_domain_form,
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
errors=errors,
mailboxes=mailboxes,
)

View File

@ -1,23 +1,31 @@
import re
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators, IntegerField
from app.constants import DMARC_RECORD
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
from app.custom_domain_validation import CustomDomainValidation
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.dns_utils import (
get_mx_domains,
get_spf_domain,
get_txt_record,
is_mx_equivalent,
)
from app.log import LOG
from app.models import (
CustomDomain,
Alias,
DomainDeletedAlias,
Mailbox,
DomainMailbox,
AutoCreateRule,
AutoCreateRuleMailbox,
Job,
)
from app.regex_utils import regex_match
from app.utils import random_string, CSRFValidationForm
@ -36,9 +44,13 @@ def domain_detail_dns(custom_domain_id):
custom_domain.ownership_txt_token = random_string(30)
Session.commit()
spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
csrf_form = CSRFValidationForm()
dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
@ -47,14 +59,15 @@ def domain_detail_dns(custom_domain_id):
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "check-ownership":
ownership_validation_result = domain_validator.validate_domain_ownership(
custom_domain
)
if ownership_validation_result.success:
txt_records = get_txt_record(custom_domain.domain)
if custom_domain.get_ownership_dns_txt_value() in txt_records:
flash(
"Domain ownership is verified. Please proceed to the other records setup",
"success",
)
custom_domain.ownership_verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns",
@ -65,28 +78,36 @@ def domain_detail_dns(custom_domain_id):
else:
flash("We can't find the needed TXT record", "error")
ownership_ok = False
ownership_errors = ownership_validation_result.errors
ownership_errors = txt_records
elif request.form.get("form-name") == "check-mx":
mx_validation_result = domain_validator.validate_mx_records(custom_domain)
if mx_validation_result.success:
mx_domains = get_mx_domains(custom_domain.domain)
if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
flash("The MX record is not correctly set", "warning")
mx_ok = False
# build mx_errors to show to user
mx_errors = [
f"{priority} {domain}" for (priority, domain) in mx_domains
]
else:
flash(
"Your domain can start receiving emails. You can now use it to create alias",
"success",
)
custom_domain.verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
else:
flash("The MX record is not correctly set", "warning")
mx_ok = False
mx_errors = mx_validation_result.errors
elif request.form.get("form-name") == "check-spf":
spf_validation_result = domain_validator.validate_spf_records(custom_domain)
if spf_validation_result.success:
spf_domains = get_spf_domain(custom_domain.domain)
if EMAIL_DOMAIN in spf_domains:
custom_domain.spf_verified = True
Session.commit()
flash("SPF is setup correctly", "success")
return redirect(
url_for(
@ -94,12 +115,14 @@ def domain_detail_dns(custom_domain_id):
)
)
else:
custom_domain.spf_verified = False
Session.commit()
flash(
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
"warning",
)
spf_ok = False
spf_errors = spf_validation_result.errors
spf_errors = get_txt_record(custom_domain.domain)
elif request.form.get("form-name") == "check-dkim":
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
@ -115,10 +138,10 @@ def domain_detail_dns(custom_domain_id):
flash("DKIM: the CNAME record is not correctly set", "warning")
elif request.form.get("form-name") == "check-dmarc":
dmarc_validation_result = domain_validator.validate_dmarc_records(
custom_domain
)
if dmarc_validation_result.success:
txt_records = get_txt_record("_dmarc." + custom_domain.domain)
if dmarc_record in txt_records:
custom_domain.dmarc_verified = True
Session.commit()
flash("DMARC is setup correctly", "success")
return redirect(
url_for(
@ -126,23 +149,19 @@ def domain_detail_dns(custom_domain_id):
)
)
else:
custom_domain.dmarc_verified = False
Session.commit()
flash(
"DMARC: The TXT record is not correctly set",
"warning",
)
dmarc_ok = False
dmarc_errors = dmarc_validation_result.errors
dmarc_errors = txt_records
return render_template(
"dashboard/domain_detail/dns.html",
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
ownership_record=domain_validator.get_ownership_verification_record(
custom_domain
),
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
dkim_records=domain_validator.get_dkim_records(custom_domain),
spf_record=domain_validator.get_expected_spf_record(custom_domain),
dmarc_record=DMARC_RECORD,
dkim_records=domain_validator.get_dkim_records(),
**locals(),
)
@ -219,16 +238,40 @@ def domain_detail(custom_domain_id):
)
elif request.form.get("form-name") == "update":
mailbox_ids = request.form.getlist("mailbox_ids")
result = set_custom_domain_mailboxes(
user_id=current_user.id,
custom_domain=custom_domain,
mailbox_ids=mailbox_ids,
)
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
mailboxes.append(mailbox)
if result.success:
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
else:
flash(result.reason.value, "warning")
if not mailboxes:
flash("You must select at least 1 mailbox", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
Session.commit()
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
@ -236,8 +279,16 @@ def domain_detail(custom_domain_id):
elif request.form.get("form-name") == "delete":
name = custom_domain.domain
LOG.d("Schedule deleting %s", custom_domain)
delete_custom_domain(custom_domain)
# Schedule delete domain job
LOG.w("schedule delete domain job for %s", custom_domain)
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": custom_domain.id},
run_at=arrow.now(),
commit=True,
)
flash(
f"{name} scheduled for deletion."

View File

@ -6,15 +6,15 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import PasswordField, validators
from app.config import CONNECT_WITH_PROTON, OIDC_CLIENT_ID, CONNECT_WITH_OIDC_ICON
from app.config import CONNECT_WITH_PROTON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser, SocialAuth
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url
_SUDO_GAP = 120
_SUDO_GAP = 900
class LoginForm(FlaskForm):
@ -51,19 +51,11 @@ def enter_sudo():
if not partner_user or partner_user.partner_id != get_proton_partner().id:
proton_enabled = False
oidc_enabled = OIDC_CLIENT_ID is not None
if oidc_enabled:
oidc_enabled = (
SocialAuth.get_by(user_id=current_user.id, social="oidc") is not None
)
return render_template(
"dashboard/enter_sudo.html",
password_check_form=password_check_form,
next=request.args.get("next"),
connect_with_proton=proton_enabled,
connect_with_oidc=oidc_enabled,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)

View File

@ -12,7 +12,6 @@ from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
AliasDeleteReason,
AliasGeneratorEnum,
User,
EmailLog,
@ -142,14 +141,12 @@ def index():
)
if request.form.get("form-name") == "delete-alias":
LOG.i(f"User {current_user} requested deletion of alias {alias}")
LOG.d("delete alias %s", alias)
email = alias.email
alias_utils.delete_alias(
alias, current_user, AliasDeleteReason.ManualAction, commit=True
)
alias_utils.delete_alias(alias, current_user)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
alias_utils.change_alias_status(alias, enabled=False)
alias.enabled = False
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")

View File

@ -2,6 +2,7 @@ import base64
import binascii
import json
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
@ -9,12 +10,19 @@ from itsdangerous import TimestampSigner
from wtforms import validators, IntegerField
from wtforms.fields.html5 import EmailField
from app import parallel_limiter, mailbox_utils, user_settings
from app.config import MAILBOX_SECRET
from app import parallel_limiter
from app.config import MAILBOX_SECRET, URL, JOB_DELETE_MAILBOX
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
mailbox_already_used,
render,
send_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox
from app.models import Mailbox, Job
from app.utils import CSRFValidationForm
@ -50,61 +58,120 @@ def mailbox_route():
if not delete_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
try:
mailbox = mailbox_utils.delete_mailbox(
current_user,
delete_mailbox_form.mailbox_id.data,
delete_mailbox_form.transfer_mailbox_id.data,
)
except mailbox_utils.MailboxError as e:
flash(e.msg, "warning")
mailbox = Mailbox.get(delete_mailbox_form.mailbox_id.data)
if not mailbox or mailbox.user_id != current_user.id:
flash("Invalid mailbox. Refresh the page", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if mailbox.id == current_user.default_mailbox_id:
flash("You cannot delete default mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
transfer_mailbox_id = delete_mailbox_form.transfer_mailbox_id.data
if transfer_mailbox_id and transfer_mailbox_id > 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
if not transfer_mailbox or transfer_mailbox.user_id != current_user.id:
flash(
"You must transfer the aliases to a mailbox you own.", "error"
)
return redirect(url_for("dashboard.mailbox_route"))
if transfer_mailbox.id == mailbox.id:
flash(
"You can not transfer the aliases to the mailbox you want to delete.",
"error",
)
return redirect(url_for("dashboard.mailbox_route"))
if not transfer_mailbox.verified:
flash("Your new mailbox is not verified", "error")
return redirect(url_for("dashboard.mailbox_route"))
# Schedule delete mailbox job
LOG.w(
f"schedule delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id
if transfer_mailbox_id > 0
else None,
},
run_at=arrow.now(),
commit=True,
)
flash(
f"Mailbox {mailbox.email} scheduled for deletion."
f"You will receive a confirmation email when the deletion is finished",
"success",
)
return redirect(url_for("dashboard.mailbox_route"))
return redirect(url_for("dashboard.mailbox_route"))
if request.form.get("form-name") == "set-default":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
try:
mailbox_id = request.form.get("mailbox_id")
mailbox = user_settings.set_default_mailbox(current_user, mailbox_id)
except user_settings.CannotSetMailbox as e:
flash(e.msg, "warning")
mailbox_id = request.form.get("mailbox_id")
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if mailbox.id == current_user.default_mailbox_id:
flash("This mailbox is already default one", "error")
return redirect(url_for("dashboard.mailbox_route"))
if not mailbox.verified:
flash("Cannot set unverified mailbox as default", "error")
return redirect(url_for("dashboard.mailbox_route"))
current_user.default_mailbox_id = mailbox.id
Session.commit()
flash(f"Mailbox {mailbox.email} is set as Default Mailbox", "success")
return redirect(url_for("dashboard.mailbox_route"))
elif request.form.get("form-name") == "create":
if not new_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
mailbox_email = new_mailbox_form.email.data.lower().strip().replace(" ", "")
try:
mailbox = mailbox_utils.create_mailbox(
current_user, mailbox_email
).mailbox
except mailbox_utils.MailboxError as e:
flash(e.msg, "warning")
if not current_user.is_premium():
flash("Only premium plan can add additional mailbox", "warning")
return redirect(url_for("dashboard.mailbox_route"))
flash(
f"You are going to receive an email to confirm {mailbox.email}.",
"success",
)
return redirect(
url_for(
"dashboard.mailbox_detail_route",
mailbox_id=mailbox.id,
if new_mailbox_form.validate():
mailbox_email = (
new_mailbox_form.email.data.lower().strip().replace(" ", "")
)
)
if not is_valid_email(mailbox_email):
flash(f"{mailbox_email} invalid", "error")
elif mailbox_already_used(mailbox_email, current_user):
flash(f"{mailbox_email} already used", "error")
elif not email_can_be_used_as_mailbox(mailbox_email):
flash(f"You cannot use {mailbox_email}.", "error")
else:
new_mailbox = Mailbox.create(
email=mailbox_email, user_id=current_user.id
)
Session.commit()
send_verification_email(current_user, new_mailbox)
flash(
f"You are going to receive an email to confirm {mailbox_email}.",
"success",
)
return redirect(
url_for(
"dashboard.mailbox_detail_route",
mailbox_id=new_mailbox.id,
)
)
return render_template(
"dashboard/mailbox.html",
@ -115,25 +182,34 @@ def mailbox_route():
)
def send_verification_email(user, mailbox):
s = TimestampSigner(MAILBOX_SECRET)
encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
b64_data = base64.urlsafe_b64encode(encoded_data)
mailbox_id_signed = s.sign(b64_data).decode()
verification_url = (
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
)
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
)
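The verification link embeds a signed, base64-encoded [mailbox_id, mailbox_email] pair. A minimal sketch of the inverse operation a verify endpoint would perform; decode_mailbox_verification and its max_age default are assumptions, not the actual handler:

from itsdangerous import TimestampSigner

def decode_mailbox_verification(signed_value: str, max_age: int = 3600):
    """Illustrative inverse of send_verification_email's signing step."""
    s = TimestampSigner(MAILBOX_SECRET)
    # raises BadSignature / SignatureExpired if tampered with or too old
    b64_data = s.unsign(signed_value, max_age=max_age)
    mailbox_id, mailbox_email = json.loads(base64.urlsafe_b64decode(b64_data))
    return mailbox_id, mailbox_email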
@dashboard_bp.route("/mailbox_verify")
@login_required
def mailbox_verify():
mailbox_id = request.args.get("mailbox_id")
code = request.args.get("code")
if not code:
# Old way
return verify_with_signed_secret(mailbox_id)
try:
mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
except mailbox_utils.MailboxError as e:
LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
flash(f"Cannot verify mailbox: {e.msg}", "error")
return redirect(url_for("dashboard.mailbox_route"))
LOG.d("Mailbox %s is verified", mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
def verify_with_signed_secret(request: str):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_verify_request = request.args.get("mailbox_id")
try:

View File

@ -11,11 +11,9 @@ from wtforms.fields.html5 import EmailField
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.models import Mailbox
@ -31,8 +29,6 @@ class ChangeEmailForm(FlaskForm):
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("20/minute", methods=["POST"])
def mailbox_detail_route(mailbox_id):
mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
@ -183,15 +179,8 @@ def mailbox_detail_route(mailbox_id):
elif request.form.get("form-name") == "toggle-pgp":
if request.form.get("pgp-enabled") == "on":
if mailbox.is_proton():
mailbox.disable_pgp = True
flash(
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
"info",
)
else:
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "info")
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "success")
else:
mailbox.disable_pgp = True
flash(f"PGP is disabled on {mailbox.email}", "info")

View File

@ -13,38 +13,51 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from flask_wtf.file import FileField
from wtforms import StringField, validators
from wtforms.fields.html5 import EmailField
from app import s3, user_settings
from app import s3, email_utils
from app.config import (
URL,
FIRST_ALIAS_DOMAIN,
ALIAS_RANDOM_SUFFIX_LENGTH,
CONNECT_WITH_PROTON,
)
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
)
from app.errors import ProtonPartnerNotSetUp
from app.extensions import limiter
from app.image_validation import detect_image_format, ImageFormat
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import (
BlockBehaviourEnum,
PlanEnum,
File,
ResetPasswordCode,
EmailChange,
User,
Alias,
CustomDomain,
AliasGeneratorEnum,
AliasSuffixEnum,
ManualSubscription,
SenderFormatEnum,
SLDomain,
CoinbaseSubscription,
AppleSubscription,
PartnerUser,
PartnerSubscription,
UnsubscribeBehaviourEnum,
)
from app.proton.utils import get_proton_partner
from app.proton.utils import get_proton_partner, perform_proton_account_unlink
from app.utils import (
random_string,
CSRFValidationForm,
canonicalize_email,
)
@ -53,6 +66,12 @@ class SettingForm(FlaskForm):
profile_picture = FileField("Profile Picture")
class ChangeEmailForm(FlaskForm):
email = EmailField(
"email", validators=[validators.DataRequired(), validators.Email()]
)
class PromoCodeForm(FlaskForm):
code = StringField("Name", validators=[validators.DataRequired()])
@ -90,6 +109,7 @@ def get_partner_subscription_and_name(
def setting():
form = SettingForm()
promo_form = PromoCodeForm()
change_email_form = ChangeEmailForm()
csrf_form = CSRFValidationForm()
email_change = EmailChange.get_by(user_id=current_user.id)
@ -102,7 +122,63 @@ def setting():
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-email":
if change_email_form.validate():
# whether user can proceed with the email update
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:
# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
new_email_valid = False
elif not email_can_be_used_as_mailbox(new_email):
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
# a pending email change with the same email exists from another user
elif EmailChange.get_by(new_email=new_email):
other_email_change: EmailChange = EmailChange.get_by(
new_email=new_email
)
LOG.w(
"Another user has a pending %s with the same email address. Current user:%s",
other_email_change,
current_user,
)
if other_email_change.is_expired():
LOG.d(
"delete the expired email change %s", other_email_change
)
EmailChange.delete(other_email_change.id)
Session.commit()
else:
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
if new_email_valid:
email_change = EmailChange.create(
user_id=current_user.id,
code=random_string(
60
), # todo: make sure the code is unique
new_email=new_email,
)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash(
"A confirmation email is on the way, please check your inbox",
"success",
)
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-profile":
if form.validate():
profile_updated = False
@ -146,6 +222,15 @@ def setting():
if profile_updated:
flash("Your profile has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-password":
flash(
"You are going to receive an email containing instructions to change your password",
"success",
)
send_reset_password_email(current_user)
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "notification-preference":
choose = request.form.get("notification")
if choose == "on":
@ -155,6 +240,7 @@ def setting():
Session.commit()
flash("Your notification preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-alias-generator":
scheme = int(request.form.get("alias-generator-scheme"))
if AliasGeneratorEnum.has_value(scheme):
@ -162,17 +248,46 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-random-alias-default-domain":
default_domain = request.form.get("random-alias-default-domain")
try:
user_settings.set_default_alias_domain(current_user, default_domain)
except user_settings.CannotSetAlias as e:
flash(e.msg, "error")
return redirect(url_for("dashboard.setting"))
if default_domain:
sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
if sl_domain:
if sl_domain.premium_only and not current_user.is_premium():
flash("You cannot use this domain", "error")
return redirect(url_for("dashboard.setting"))
current_user.default_alias_public_domain_id = sl_domain.id
current_user.default_alias_custom_domain_id = None
else:
custom_domain = CustomDomain.get_by(domain=default_domain)
if custom_domain:
# sanity check
if (
custom_domain.user_id != current_user.id
or not custom_domain.verified
):
LOG.w(
"%s cannot use domain %s", current_user, custom_domain
)
flash(f"Domain {default_domain} can't be used", "error")
return redirect(request.url)
else:
current_user.default_alias_custom_domain_id = (
custom_domain.id
)
current_user.default_alias_public_domain_id = None
else:
current_user.default_alias_custom_domain_id = None
current_user.default_alias_public_domain_id = None
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "random-alias-suffix":
scheme = int(request.form.get("random-alias-suffix-generator"))
if AliasSuffixEnum.has_value(scheme):
@ -180,6 +295,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-sender-format":
sender_format = int(request.form.get("sender-format"))
if SenderFormatEnum.has_value(sender_format):
@ -189,6 +305,7 @@ def setting():
flash("Your sender format preference has been updated", "success")
Session.commit()
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "replace-ra":
choose = request.form.get("replace-ra")
if choose == "on":
@ -198,21 +315,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "enable_data_breach_check":
if not current_user.is_premium():
flash("Only premium plan can enable data breach monitoring", "warning")
return redirect(url_for("dashboard.setting"))
choose = request.form.get("enable_data_breach_check")
if choose == "on":
LOG.i("User {current_user} has enabled data breach monitoring")
current_user.enable_data_breach_check = True
flash("Data breach monitoring is enabled", "success")
else:
LOG.i("User {current_user} has disabled data breach monitoring")
current_user.enable_data_breach_check = False
flash("Data breach monitoring is disabled", "info")
Session.commit()
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "sender-in-ra":
choose = request.form.get("enable")
if choose == "on":
@ -222,6 +325,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "expand-alias-info":
choose = request.form.get("enable")
if choose == "on":
@ -283,6 +387,14 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "send-full-user-report":
if ExportUserDataJob(current_user).store_job_in_db():
flash(
"You will receive your SimpleLogin data via email shortly",
"success",
)
else:
flash("An export of your data is currently in progress", "error")
manual_sub = ManualSubscription.get_by(user_id=current_user.id)
apple_sub = AppleSubscription.get_by(user_id=current_user.id)
@ -305,6 +417,7 @@ def setting():
SenderFormatEnum=SenderFormatEnum,
BlockBehaviourEnum=BlockBehaviourEnum,
promo_form=promo_form,
change_email_form=change_email_form,
pending_email=pending_email,
AliasGeneratorEnum=AliasGeneratorEnum,
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
@ -319,3 +432,85 @@ def setting():
connect_with_proton=CONNECT_WITH_PROTON,
proton_linked_account=proton_linked_account,
)
def send_reset_password_email(user):
"""
generate a new ResetPasswordCode and send it over email to user
"""
# the reset password code is valid for 1h
reset_password_code = ResetPasswordCode.create(
user_id=user.id, code=random_string(60)
)
Session.commit()
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
email_utils.send_reset_password_email(user.email, reset_password_link)
def send_change_email_confirmation(user: User, email_change: EmailChange):
"""
send confirmation email to the new email address
"""
link = f"{URL}/auth/change_email?code={email_change.code}"
email_utils.send_change_email(email_change.new_email, user.email, link)
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
@login_required
def resend_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
# extend email change expiration
email_change.expired = arrow.now().shift(hours=12)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash("A confirmation email is on the way, please check your inbox", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
def cancel_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
EmailChange.delete(email_change.id)
Session.commit()
flash("Your email change is cancelled", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
@login_required
def unlink_proton_account():
csrf_form = CSRFValidationForm()
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
perform_proton_account_unlink(current_user)
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))

View File

@ -8,7 +8,6 @@ from app.db import Session
from flask import redirect, url_for, flash, request, render_template
from flask_login import login_required, current_user
from app import alias_utils
from app.dashboard.base import dashboard_bp
from app.handler.unsubscribe_encoder import UnsubscribeAction
from app.handler.unsubscribe_handler import UnsubscribeHandler
@ -32,7 +31,7 @@ def unsubscribe(alias_id):
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
if request.method == "POST":
alias_utils.change_alias_status(alias, False)
alias.enabled = False
flash(f"Alias {alias.email} has been blocked", "success")
Session.commit()

View File

@ -1,5 +1,4 @@
from io import BytesIO
from urllib.parse import urlparse
from flask import request, render_template, redirect, url_for, flash
from flask_login import current_user, login_required
@ -12,7 +11,6 @@ from app.config import ADMIN_EMAIL
from app.db import Session
from app.developer.base import developer_bp
from app.email_utils import send_email
from app.image_validation import detect_image_format, ImageFormat
from app.log import LOG
from app.models import Client, RedirectUri, File, Referral
from app.utils import random_string
@ -48,25 +46,16 @@ def client_detail(client_id):
approval_form.description.data = client.description
if action == "edit" and form.validate_on_submit():
parsed_url = urlparse(form.url.data)
if parsed_url.scheme != "https":
flash("Only https urls are allowed", "error")
return redirect(url_for("developer.index"))
client.name = form.name.data
client.home_url = form.url.data
if form.icon.data:
icon_data = form.icon.data.read(10240)
if detect_image_format(icon_data) == ImageFormat.Unknown:
flash("Unknown file format", "warning")
return redirect(url_for("developer.index"))
if client.icon:
s3.delete(client.icon_id)
File.delete(client.icon)
# todo: remove current icon if any
# todo: handle remove icon
file_path = random_string(30)
file = File.create(path=file_path, user_id=client.user_id)
s3.upload_from_bytesio(file_path, BytesIO(icon_data))
s3.upload_from_bytesio(file_path, BytesIO(form.icon.data.read()))
Session.flush()
LOG.d("upload file %s to s3", file)

View File

@ -1,5 +1,3 @@
from urllib.parse import urlparse
from flask import render_template, redirect, url_for, flash
from flask_login import current_user, login_required
from flask_wtf import FlaskForm
@ -22,10 +20,6 @@ def new_client():
if form.validate_on_submit():
client = Client.create_new(form.name.data, current_user.id)
parsed_url = urlparse(form.url.data)
if parsed_url.scheme != "https":
flash("Only https urls are allowed", "error")
return redirect(url_for("developer.new_client"))
client.home_url = form.url.data
Session.commit()

View File

@ -1,22 +1,102 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import List, Optional
from app import config
from typing import Optional, List, Tuple
import dns.resolver
from app.config import NAMESERVERS
def _get_dns_resolver():
my_resolver = dns.resolver.Resolver()
my_resolver.nameservers = config.NAMESERVERS
return my_resolver
def get_ns(hostname) -> List[str]:
try:
answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
except Exception:
return []
return [a.to_text() for a in answers]
def get_cname_record(hostname) -> Optional[str]:
"""Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
try:
answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
except Exception:
return None
for a in answers:
ret = a.to_text()
return ret[:-1]
return None
def get_mx_domains(hostname) -> List[Tuple[int, str]]:
"""return a list of (priority, domain name) tuples sorted by priority (lowest priority first).
Each domain name ends with a trailing ".".
"""
try:
answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
except Exception:
return []
ret = []
for a in answers:
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
parts = record.split(" ")
ret.append((int(parts[0]), parts[1]))
return sorted(ret, key=lambda prio_domain: prio_domain[0])
_include_spf = "include:"
@dataclass
class MxRecord:
priority: int
domain: str
def get_spf_domain(hostname) -> List[str]:
"""return all domains listed in *include:*"""
try:
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
except Exception:
return []
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
record = record.decode() # record is bytes
if record.startswith("v=spf1"):
parts = record.split(" ")
for part in parts:
if part.startswith(_include_spf):
ret.append(part[part.find(_include_spf) + len(_include_spf) :])
return ret
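Applied to a fixed record (no DNS lookup involved), the include: parsing above behaves like this:

record = "v=spf1 include:simplelogin.co include:_spf.google.com ~all"
domains = [p[len(_include_spf):] for p in record.split(" ") if p.startswith(_include_spf)]
assert domains == ["simplelogin.co", "_spf.google.com"]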
def get_txt_record(hostname) -> List[str]:
try:
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
except Exception:
return []
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
record = record.decode() # record is bytes
ret.append(record)
return ret
def is_mx_equivalent(
mx_domains: List[MxRecord], ref_mx_domains: List[MxRecord]
mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
) -> bool:
"""
Compare mx_domains with ref_mx_domains to see if they are equivalent.
@ -25,127 +105,16 @@ def is_mx_equivalent(
The priority order is taken into account but not the priority number.
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
"""
mx_domains = sorted(mx_domains, key=lambda x: x.priority)
ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x.priority)
mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
ref_mx_domains = sorted(
ref_mx_domains, key=lambda priority_domain: priority_domain[0]
)
if len(mx_domains) < len(ref_mx_domains):
return False
for actual, expected in zip(mx_domains, ref_mx_domains):
if actual.domain != expected.domain:
for i in range(0, len(ref_mx_domains)):
if mx_domains[i][1] != ref_mx_domains[i][1]:
return False
return True
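A usage sketch for the MxRecord-based is_mx_equivalent above: the relative order of priorities matters, the numbers themselves do not, and having fewer records than the reference list fails the check:

mx = [MxRecord(priority=10, domain="mx1.example.com."),
      MxRecord(priority=20, domain="mx2.example.com.")]
ref = [MxRecord(priority=1, domain="mx1.example.com."),
       MxRecord(priority=2, domain="mx2.example.com.")]
assert is_mx_equivalent(mx, ref)
assert not is_mx_equivalent(mx[:1], ref)  # fewer records than the reference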
class DNSClient(ABC):
@abstractmethod
def get_cname_record(self, hostname: str) -> Optional[str]:
pass
@abstractmethod
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
pass
def get_spf_domain(self, hostname: str) -> List[str]:
"""
return all domains listed in *include:*
"""
try:
records = self.get_txt_record(hostname)
ret = []
for record in records:
if record.startswith("v=spf1"):
parts = record.split(" ")
for part in parts:
if part.startswith(_include_spf):
ret.append(
part[part.find(_include_spf) + len(_include_spf) :]
)
return ret
except Exception:
return []
@abstractmethod
def get_txt_record(self, hostname: str) -> List[str]:
pass
class NetworkDNSClient(DNSClient):
def __init__(self, nameservers: List[str]):
self._resolver = dns.resolver.Resolver()
self._resolver.nameservers = nameservers
def get_cname_record(self, hostname: str) -> Optional[str]:
"""
Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end
"""
try:
answers = self._resolver.resolve(hostname, "CNAME", search=True)
for a in answers:
ret = a.to_text()
return ret[:-1]
except Exception:
return None
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
"""
return list of (priority, domain name) sorted by priority (lowest priority first)
domain name ends with a "." at the end.
"""
try:
answers = self._resolver.resolve(hostname, "MX", search=True)
ret = []
for a in answers:
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
parts = record.split(" ")
ret.append(MxRecord(priority=int(parts[0]), domain=parts[1]))
return sorted(ret, key=lambda x: x.priority)
except Exception:
return []
def get_txt_record(self, hostname: str) -> List[str]:
try:
answers = self._resolver.resolve(hostname, "TXT", search=False)
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
ret.append(record.decode())
return ret
except Exception:
return []
class InMemoryDNSClient(DNSClient):
def __init__(self):
self.cname_records: dict[str, Optional[str]] = {}
self.mx_records: dict[str, List[MxRecord]] = {}
self.spf_records: dict[str, List[str]] = {}
self.txt_records: dict[str, List[str]] = {}
def set_cname_record(self, hostname: str, cname: str):
self.cname_records[hostname] = cname
def set_mx_records(self, hostname: str, mx_list: List[MxRecord]):
self.mx_records[hostname] = mx_list
def set_txt_record(self, hostname: str, txt_list: List[str]):
self.txt_records[hostname] = txt_list
def get_cname_record(self, hostname: str) -> Optional[str]:
return self.cname_records.get(hostname)
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
mx_list = self.mx_records.get(hostname, [])
return sorted(mx_list, key=lambda x: x.priority)
def get_txt_record(self, hostname: str) -> List[str]:
return self.txt_records.get(hostname, [])
def get_network_dns_client() -> NetworkDNSClient:
return NetworkDNSClient(NAMESERVERS)
def get_mx_domains(hostname: str) -> List[MxRecord]:
return get_network_dns_client().get_mx_domains(hostname)
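The InMemoryDNSClient above exists so tests can stub DNS answers without touching the network. A usage sketch built only from the classes shown in this file:

client = InMemoryDNSClient()
client.set_mx_records(
    "example.com",
    [MxRecord(priority=20, domain="mx2.example.com."),
     MxRecord(priority=10, domain="mx1.example.com.")],
)
# Answers come back sorted by priority, mirroring NetworkDNSClient.
assert [r.domain for r in client.get_mx_domains("example.com")] == [
    "mx1.example.com.",
    "mx2.example.com.",
]
assert client.get_cname_record("unknown.example.com") is None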

View File

@ -21,7 +21,6 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
RETURN_PATH = "Return-Path"
AUTHENTICATION_RESULTS = "Authentication-Results"
SL_QUEUE_ID = "X-SL-Queue-Id"
# headers used to DKIM sign in order of preference
DKIM_HEADERS = [

View File

@ -33,7 +33,6 @@ from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from jinja2 import Environment, FileSystemLoader
from sqlalchemy import func
from flask_login import current_user
from app import config
from app.db import Session
@ -69,27 +68,17 @@ VERP_TIME_START = 1640995200
VERP_HMAC_ALGO = "sha3-224"
def render(template_name: str, user: Optional[User], **kwargs) -> str:
def render(template_name, **kwargs) -> str:
templates_dir = os.path.join(config.ROOT_DIR, "templates", "emails")
env = Environment(loader=FileSystemLoader(templates_dir))
template = env.get_template(template_name)
if user is None:
if current_user and current_user.is_authenticated:
user = current_user
use_partner_template = False
if user:
use_partner_template = user.has_used_alias_from_partner()
kwargs["user"] = user
return template.render(
MAX_NB_EMAIL_FREE_PLAN=config.MAX_NB_EMAIL_FREE_PLAN,
URL=config.URL,
LANDING_PAGE_URL=config.LANDING_PAGE_URL,
YEAR=arrow.now().year,
USE_PARTNER_TEMPLATE=use_partner_template,
**kwargs,
)
@ -122,59 +111,53 @@ def send_trial_end_soon_email(user):
)
def send_activation_email(user: User, activation_link):
def send_activation_email(email, activation_link):
send_email(
user.email,
email,
"Just one more step to join SimpleLogin",
render(
"transactional/activation.txt",
user=user,
activation_link=activation_link,
email=user.email,
email=email,
),
render(
"transactional/activation.html",
user=user,
activation_link=activation_link,
email=user.email,
email=email,
),
)
def send_reset_password_email(user: User, reset_password_link):
def send_reset_password_email(email, reset_password_link):
send_email(
user.email,
email,
"Reset your password on SimpleLogin",
render(
"transactional/reset-password.txt",
user=user,
reset_password_link=reset_password_link,
),
render(
"transactional/reset-password.html",
user=user,
reset_password_link=reset_password_link,
),
)
def send_change_email(user: User, new_email, link):
def send_change_email(new_email, current_email, link):
send_email(
new_email,
"Confirm email update on SimpleLogin",
render(
"transactional/change-email.txt",
user=user,
link=link,
new_email=new_email,
current_email=user.email,
current_email=current_email,
),
render(
"transactional/change-email.html",
user=user,
link=link,
new_email=new_email,
current_email=user.email,
current_email=current_email,
),
)
@ -187,32 +170,28 @@ def send_invalid_totp_login_email(user, totp_type):
"Unsuccessful attempt to login to your SimpleLogin account",
render(
"transactional/invalid-totp-login.txt",
user=user,
type=totp_type,
),
render(
"transactional/invalid-totp-login.html",
user=user,
type=totp_type,
),
1,
)
def send_test_email_alias(user: User, email: str):
def send_test_email_alias(email, name):
send_email(
email,
f"This email is sent to {email}",
render(
"transactional/test-email.txt",
user=user,
name=user.name,
name=name,
alias=email,
),
render(
"transactional/test-email.html",
user=user,
name=user.name,
name=name,
alias=email,
),
)
@ -227,13 +206,11 @@ def send_cannot_create_directory_alias(user, alias_address, directory_name):
f"Alias {alias_address} cannot be created",
render(
"transactional/cannot-create-alias-directory.txt",
user=user,
alias=alias_address,
directory=directory_name,
),
render(
"transactional/cannot-create-alias-directory.html",
user=user,
alias=alias_address,
directory=directory_name,
),
@ -251,13 +228,11 @@ def send_cannot_create_directory_alias_disabled(user, alias_address, directory_n
f"Alias {alias_address} cannot be created",
render(
"transactional/cannot-create-alias-directory-disabled.txt",
user=user,
alias=alias_address,
directory=directory_name,
),
render(
"transactional/cannot-create-alias-directory-disabled.html",
user=user,
alias=alias_address,
directory=directory_name,
),
@ -273,13 +248,11 @@ def send_cannot_create_domain_alias(user, alias, domain):
f"Alias {alias} cannot be created",
render(
"transactional/cannot-create-alias-domain.txt",
user=user,
alias=alias,
domain=domain,
),
render(
"transactional/cannot-create-alias-domain.html",
user=user,
alias=alias,
domain=domain,
),
@ -521,10 +494,9 @@ def delete_header(msg: Message, header: str):
def sanitize_header(msg: Message, header: str):
"""remove trailing space and remove linebreak from a header"""
header_lowercase = header.lower()
for i in reversed(range(len(msg._headers))):
header_name = msg._headers[i][0].lower()
if header_name == header_lowercase:
if header_name == header.lower():
# msg._headers[i] is a tuple like ('From', 'hey@google.com')
if msg._headers[i][1]:
msg._headers[i] = (
@ -548,9 +520,7 @@ def can_create_directory_for_address(email_address: str) -> bool:
for domain in config.ALIAS_DOMAINS:
if email_address.endswith("@" + domain):
return True
LOG.i(
f"Cannot create address in directory for {email_address} since it does not belong to a valid directory domain"
)
return False
@ -592,7 +562,7 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
from app.models import CustomDomain
if CustomDomain.get_by(domain=domain, is_sl_subdomain=True, verified=True):
if CustomDomain.get_by(domain=domain, verified=True):
LOG.d("domain %s is a SimpleLogin custom domain", domain)
return False
@ -657,7 +627,7 @@ def get_mx_domain_list(domain) -> [str]:
"""
priority_domains = get_mx_domains(domain)
return [d.domain[:-1] for d in priority_domains]
return [d[:-1] for _, d in priority_domains]
def personal_email_already_used(email_address: str) -> bool:
@ -948,20 +918,10 @@ def decode_text(text: str, encoding: EmailEncoding = EmailEncoding.NO) -> str:
return text
def add_header(
msg: Message, text_header, html_header=None, subject_prefix=None
) -> Message:
def add_header(msg: Message, text_header, html_header=None) -> Message:
if not html_header:
html_header = text_header.replace("\n", "<br>")
if subject_prefix is not None:
subject = msg[headers.SUBJECT]
if not subject:
msg.add_header(headers.SUBJECT, subject_prefix)
else:
subject = f"{subject_prefix} {subject}"
msg.replace_header(headers.SUBJECT, subject)
content_type = msg.get_content_type().lower()
if content_type == "text/plain":
encoding = get_encoding(msg)
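A standard-library sketch of the subject_prefix behaviour in add_header above: prepend a warning tag to the existing Subject, or create the header when it is missing:

from email.message import EmailMessage

msg = EmailMessage()
msg["Subject"] = "Invoice"
prefix = "[Possible phishing attempt]"
subject = msg["Subject"]
if not subject:
    msg["Subject"] = prefix
else:
    msg.replace_header("Subject", f"{prefix} {subject}")
assert msg["Subject"] == "[Possible phishing attempt] Invoice"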
@ -1292,7 +1252,6 @@ def spf_pass(
f"SimpleLogin Alert: attempt to send emails from your alias {alias.email} from unknown IP Address",
render(
"transactional/spf-fail.txt",
user=user,
alias=alias.email,
ip=ip,
mailbox_url=config.URL + f"/dashboard/mailbox/{mailbox.id}#spf",
@ -1302,7 +1261,6 @@ def spf_pass(
),
render(
"transactional/spf-fail.html",
user=user,
ip=ip,
mailbox_url=config.URL + f"/dashboard/mailbox/{mailbox.id}#spf",
to_email=contact_email,
@ -1445,7 +1403,7 @@ def generate_verp_email(
# Time is in minutes granularity and start counting on 2022-01-01 to reduce bytes to represent time
data = [
verp_type.value,
object_id or 0,
object_id,
int((time.time() - VERP_TIME_START) / 60),
]
json_payload = json.dumps(data).encode("utf-8")
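The hunk above packs the VERP payload as a compact JSON list of (type, object id, minutes since VERP_TIME_START). A hedged sketch of the signing step that presumably follows; the secret handling and truncation length here are assumptions for illustration, not the code's actual scheme:

import hmac
import json
import time

VERP_TIME_START = 1640995200
VERP_HMAC_ALGO = "sha3-224"

def verp_payload(verp_type_value: int, object_id: int) -> bytes:
    data = [verp_type_value, object_id or 0, int((time.time() - VERP_TIME_START) / 60)]
    return json.dumps(data).encode("utf-8")

def verp_signature(payload: bytes, secret: bytes) -> str:
    # A truncated HMAC keeps the bounce address short; the length is an assumption.
    return hmac.new(secret, payload, VERP_HMAC_ALGO).hexdigest()[:16]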

View File

@ -1,95 +0,0 @@
from abc import ABC, abstractmethod
import newrelic.agent
from app import config
from app.db import Session
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.log import LOG
from app.models import User, PartnerUser, SyncEvent
from app.proton.utils import get_proton_partner
from typing import Optional
NOTIFICATION_CHANNEL = "simplelogin_sync_events"
class Dispatcher(ABC):
@abstractmethod
def send(self, event: bytes):
pass
class PostgresDispatcher(Dispatcher):
def send(self, event: bytes):
instance = SyncEvent.create(content=event, flush=True)
Session.execute(f"NOTIFY {NOTIFICATION_CHANNEL}, '{instance.id}';")
@staticmethod
def get():
return PostgresDispatcher()
class GlobalDispatcher:
__dispatcher: Optional[Dispatcher] = None
@staticmethod
def get_dispatcher() -> Dispatcher:
if not GlobalDispatcher.__dispatcher:
GlobalDispatcher.__dispatcher = PostgresDispatcher.get()
return GlobalDispatcher.__dispatcher
@staticmethod
def set_dispatcher(dispatcher: Optional[Dispatcher]):
GlobalDispatcher.__dispatcher = dispatcher
class EventDispatcher:
@staticmethod
def send_event(
user: User,
content: event_pb2.EventContent,
dispatcher: Optional[Dispatcher] = None,
skip_if_webhook_missing: bool = True,
):
if dispatcher is None:
dispatcher = GlobalDispatcher.get_dispatcher()
if config.EVENT_WEBHOOK_DISABLE:
LOG.i("Not sending events because webhook is disabled")
return
if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
LOG.i(
"Not sending events because webhook is not configured and allowed to be empty"
)
return
partner_user = EventDispatcher.__partner_user(user.id)
if not partner_user:
LOG.i(f"Not sending events because there's no partner user for user {user}")
return
event = event_pb2.Event(
user_id=user.id,
external_user_id=partner_user.external_user_id,
partner_id=partner_user.partner_id,
content=content,
)
serialized = event.SerializeToString()
dispatcher.send(serialized)
event_type = content.WhichOneof("content")
newrelic.agent.record_custom_event("EventStoredToDb", {"type": event_type})
LOG.i("Sent event to the dispatcher")
@staticmethod
def __partner_user(user_id: int) -> Optional[PartnerUser]:
# Check if the current user has a partner_id
try:
proton_partner_id = get_proton_partner().id
except ProtonPartnerNotSetUp:
return None
# It has. Retrieve the information for the PartnerUser
return PartnerUser.get_by(user_id=user_id, partner_id=proton_partner_id)
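The Dispatcher abstraction above makes the event path testable. A sketch of a capture dispatcher, assuming config.EVENT_WEBHOOK is set and the user is partner-linked; assertions can parse the captured bytes back with event_pb2.Event:

from typing import List

class MemoryDispatcher(Dispatcher):
    def __init__(self):
        self.events: List[bytes] = []

    def send(self, event: bytes):
        # Record the serialized event instead of NOTIFY-ing Postgres.
        self.events.append(event)

dispatcher = MemoryDispatcher()
EventDispatcher.send_event(
    user, event_pb2.EventContent(user_deleted=event_pb2.UserDeleted()), dispatcher=dispatcher
)
assert len(dispatcher.events) == 1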

View File

@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: event.proto
# Protobuf Python Version: 5.27.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
_runtime_version.Domain.PUBLIC,
5,
27,
0,
'',
'event.proto'
)
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
DESCRIPTOR._loaded_options = None
_globals['_USERPLANCHANGED']._serialized_start=35
_globals['_USERPLANCHANGED']._serialized_end=75
_globals['_USERDELETED']._serialized_start=77
_globals['_USERDELETED']._serialized_end=90
_globals['_ALIASCREATED']._serialized_start=92
_globals['_ALIASCREATED']._serialized_end=184
_globals['_ALIASSTATUSCHANGED']._serialized_start=186
_globals['_ALIASSTATUSCHANGED']._serialized_end=270
_globals['_ALIASDELETED']._serialized_start=272
_globals['_ALIASDELETED']._serialized_end=313
_globals['_ALIASCREATEDLIST']._serialized_start=315
_globals['_ALIASCREATEDLIST']._serialized_end=383
_globals['_EVENTCONTENT']._serialized_start=386
_globals['_EVENTCONTENT']._serialized_end=789
_globals['_EVENT']._serialized_start=791
_globals['_EVENT']._serialized_end=912
# @@protoc_insertion_point(module_scope)

View File

@ -1,84 +0,0 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
DESCRIPTOR: _descriptor.FileDescriptor
class UserPlanChanged(_message.Message):
__slots__ = ("plan_end_time",)
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
plan_end_time: int
def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
class UserDeleted(_message.Message):
__slots__ = ()
def __init__(self) -> None: ...
class AliasCreated(_message.Message):
__slots__ = ("id", "email", "note", "enabled", "created_at")
ID_FIELD_NUMBER: _ClassVar[int]
EMAIL_FIELD_NUMBER: _ClassVar[int]
NOTE_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
CREATED_AT_FIELD_NUMBER: _ClassVar[int]
id: int
email: str
note: str
enabled: bool
created_at: int
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., note: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
class AliasStatusChanged(_message.Message):
__slots__ = ("id", "email", "enabled", "created_at")
ID_FIELD_NUMBER: _ClassVar[int]
EMAIL_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
CREATED_AT_FIELD_NUMBER: _ClassVar[int]
id: int
email: str
enabled: bool
created_at: int
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
class AliasDeleted(_message.Message):
__slots__ = ("id", "email")
ID_FIELD_NUMBER: _ClassVar[int]
EMAIL_FIELD_NUMBER: _ClassVar[int]
id: int
email: str
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ...) -> None: ...
class AliasCreatedList(_message.Message):
__slots__ = ("events",)
EVENTS_FIELD_NUMBER: _ClassVar[int]
events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
class EventContent(_message.Message):
__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
USER_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
user_plan_change: UserPlanChanged
user_deleted: UserDeleted
alias_created: AliasCreated
alias_status_change: AliasStatusChanged
alias_deleted: AliasDeleted
alias_create_list: AliasCreatedList
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...
class Event(_message.Message):
__slots__ = ("user_id", "external_user_id", "partner_id", "content")
USER_ID_FIELD_NUMBER: _ClassVar[int]
EXTERNAL_USER_ID_FIELD_NUMBER: _ClassVar[int]
PARTNER_ID_FIELD_NUMBER: _ClassVar[int]
CONTENT_FIELD_NUMBER: _ClassVar[int]
user_id: int
external_user_id: str
partner_id: int
content: EventContent
def __init__(self, user_id: _Optional[int] = ..., external_user_id: _Optional[str] = ..., partner_id: _Optional[int] = ..., content: _Optional[_Union[EventContent, _Mapping]] = ...) -> None: ...
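With the stubs above, the generated module can be exercised directly. A round-trip sketch using only fields declared in this file:

from app.events.generated import event_pb2

event = event_pb2.Event(
    user_id=1,
    external_user_id="ext-user-1",
    partner_id=2,
    content=event_pb2.EventContent(
        alias_created=event_pb2.AliasCreated(
            id=3, email="alias@example.com", note="", enabled=True, created_at=1700000000
        )
    ),
)
raw = event.SerializeToString()
parsed = event_pb2.Event()
parsed.ParseFromString(raw)
assert parsed.content.WhichOneof("content") == "alias_created"
assert parsed.content.alias_created.email == "alias@example.com"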

View File

@ -30,9 +30,7 @@ def apply_dmarc_policy_for_forward_phase(
) -> Tuple[Message, Optional[str]]:
spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
if not DMARC_CHECK_ENABLED or not spam_result:
LOG.i("DMARC check disabled")
return msg, None
LOG.i(f"Spam check result in {spam_result}")
from_header = get_header_unicode(msg[headers.FROM])
@ -64,7 +62,6 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
msg,
warning_plain_text,
warning_html,
subject_prefix="[Possible phishing attempt]",
)
return changed_msg, None
@ -77,7 +74,6 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
msg,
warning_plain_text,
warning_html,
subject_prefix="[Possible phishing attempt]",
)
return changed_msg, None
@ -106,14 +102,12 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
f"An email sent to {alias.email} has been quarantined",
render(
"transactional/message-quarantine-dmarc.txt.jinja2",
user=user,
from_header=from_header,
alias=alias,
refused_email_url=email_log.get_dashboard_url(),
),
render(
"transactional/message-quarantine-dmarc.html",
user=user,
from_header=from_header,
alias=alias,
refused_email_url=email_log.get_dashboard_url(),
@ -156,10 +150,8 @@ def apply_dmarc_policy_for_reply_phase(
) -> Optional[str]:
spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
if not DMARC_CHECK_ENABLED or not spam_result:
LOG.i("DMARC check disabled")
return None
LOG.i(f"Spam check result is {spam_result}")
if spam_result.dmarc not in (
DmarcCheckResult.quarantine,
DmarcCheckResult.reject,
@ -178,14 +170,12 @@ def apply_dmarc_policy_for_reply_phase(
f"Attempt to send an email to your contact {contact_recipient.email} from {envelope.mail_from}",
render(
"transactional/spoof-reply.txt.jinja2",
user=alias_from.user,
contact=contact_recipient,
alias=alias_from,
sender=envelope.mail_from,
),
render(
"transactional/spoof-reply.html",
user=alias_from.user,
contact=contact_recipient,
alias=alias_from,
sender=envelope.mail_from,

View File

@ -319,13 +319,11 @@ def report_complaint_to_user_in_forward_phase(
f"Abuse report from {capitalized_name}",
render(
"transactional/provider-complaint-forward-phase.txt.jinja2",
user=user,
email=mailbox_email,
provider=capitalized_name,
),
render(
"transactional/provider-complaint-forward-phase.html",
user=user,
email=mailbox_email,
provider=capitalized_name,
),

View File

@ -3,7 +3,6 @@ from email.header import Header
from email.message import Message
from app.email import headers
from app import config
from app.email_utils import add_or_replace_header, delete_header
from app.handler.unsubscribe_encoder import (
UnsubscribeEncoder,
@ -48,11 +47,6 @@ class UnsubscribeGenerator:
method = raw_method[start + 1 : end]
url_data = urllib.parse.urlparse(method)
if url_data.scheme == "mailto":
if url_data.path == config.UNSUBSCRIBER:
LOG.debug(
f"Skipping replacing unsubscribe since the original email already points to {config.UNSUBSCRIBER}"
)
return message
query_data = urllib.parse.parse_qs(url_data.query)
mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
LOG.debug(f"Unsub is mailto to {mailto_unsubs}")

View File

@ -5,7 +5,6 @@ from typing import Optional
from aiosmtpd.smtp import Envelope
from app import config
from app import alias_utils
from app.db import Session
from app.email import headers, status
from app.email_utils import (
@ -102,8 +101,7 @@ class UnsubscribeHandler:
mailbox.email, alias
):
return status.E509
LOG.i(f"User disabled alias {alias} via unsubscribe header")
alias_utils.change_alias_status(alias, enabled=False)
alias.enabled = False
Session.commit()
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
for mailbox in alias.mailboxes:

View File

@ -30,10 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
LOG.d("Download file %s from %s", batch_import.file, file_url)
r = requests.get(file_url)
# Replace invisible character
lines = [
line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
]
lines = [line.decode("utf-8") for line in r.iter_lines()]
import_from_csv(batch_import, user, lines)
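The stripped \ufeff above is the byte-order mark that spreadsheet exports often prepend; without removing it, the CSV reader would see a mangled first header such as "\ufeffalias". A tiny sketch of the same cleanup:

raw_lines = [b"\xef\xbb\xbfalias,note\r", b"john@example.com,hello\r"]
lines = [line.decode("utf-8").replace("\ufeff", "").strip() for line in raw_lines]
assert lines == ["alias,note", "john@example.com,hello"]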

View File

@ -1,52 +0,0 @@
import newrelic.agent
from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
from app.log import LOG
from app.models import User, Alias
def send_alias_creation_events_for_user(
user: User, dispatcher: Dispatcher, chunk_size=50
):
if user.disabled:
LOG.i("User {user} is disabled. Skipping sending events for that user")
return
chunk_size = min(chunk_size, 50)
event_list = []
LOG.i("Sending alias create events for user {user}")
for alias in (
Alias.yield_per_query(chunk_size)
.filter_by(user_id=user.id)
.order_by(Alias.id.asc())
):
event_list.append(
AliasCreated(
id=alias.id,
email=alias.email,
note=alias.note,
enabled=alias.enabled,
created_at=int(alias.created_at.timestamp),
)
)
if len(event_list) >= chunk_size:
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
newrelic.agent.record_custom_metric(
"Custom/event_alias_created_event", len(event_list)
)
event_list = []
if len(event_list) > 0:
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
newrelic.agent.record_custom_metric(
"Custom/event_alias_created_event", len(event_list)
)
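A usage sketch, assuming a partner-linked user and the MemoryDispatcher sketched earlier: all existing aliases are replayed as AliasCreated events, flushed in chunks:

dispatcher = MemoryDispatcher()
send_alias_creation_events_for_user(user, dispatcher=dispatcher, chunk_size=100)
# chunk_size is clamped to 50 inside the function, so a user with 120
# aliases produces three AliasCreatedList events on this dispatcher.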

View File

@ -137,9 +137,7 @@ class ExportUserDataJob:
msg[headers.SUBJECT] = "Your SimpleLogin data"
msg[headers.FROM] = f'"SimpleLogin (noreply)" <{config.NOREPLY}>'
msg[headers.TO] = to_email
msg.attach(
MIMEText(render("transactional/user-report.html", user=self._user), "html")
)
msg.attach(MIMEText(render("transactional/user-report.html"), "html"))
attachment = MIMEApplication(zipped_contents.read())
attachment.add_header(
"Content-Disposition", "attachment", filename="user_report.zip"

View File

@ -76,6 +76,7 @@ class SendRequest:
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
self.save_request_to_file(file_path)
@staticmethod
def save_request_to_failed_dir(self, prefix: str = "DeliveryRetryFail"):
file_name = (
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"

View File

@ -1,263 +0,0 @@
import dataclasses
import secrets
import random
from typing import Optional
import arrow
from app import config
from app.config import JOB_DELETE_MAILBOX
from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
send_email,
render,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import User, Mailbox, Job, MailboxActivation
@dataclasses.dataclass
class CreateMailboxOutput:
mailbox: Mailbox
activation: Optional[MailboxActivation]
class MailboxError(Exception):
def __init__(self, msg: str):
self.msg = msg
class OnlyPaidError(MailboxError):
def __init__(self):
self.msg = "Only available for paid plans"
class CannotVerifyError(MailboxError):
def __init__(self, msg: str):
self.msg = msg
MAX_ACTIVATION_TRIES = 3
def create_mailbox(
user: User,
email: str,
verified: bool = False,
send_email: bool = True,
use_digit_codes: bool = False,
send_link: bool = True,
) -> CreateMailboxOutput:
if not user.is_premium():
LOG.i(
f"User {user} has tried to create mailbox with {email} but is not premium"
)
raise OnlyPaidError()
if not is_valid_email(email):
LOG.i(
f"User {user} has tried to create mailbox with {email} but is not valid email"
)
raise MailboxError("Invalid email")
elif mailbox_already_used(email, user):
LOG.i(
f"User {user} has tried to create mailbox with {email} but email is already used"
)
raise MailboxError("Email already used")
elif not email_can_be_used_as_mailbox(email):
LOG.i(
f"User {user} has tried to create mailbox with {email} but email is invalid"
)
raise MailboxError("Invalid email")
new_mailbox = Mailbox.create(
email=email, user_id=user.id, verified=verified, commit=True
)
if verified:
LOG.i(f"User {user} as created a pre-verified mailbox with {email}")
return CreateMailboxOutput(mailbox=new_mailbox, activation=None)
LOG.i(f"User {user} has created mailbox with {email}")
activation = generate_activation_code(new_mailbox, use_digit_code=use_digit_codes)
output = CreateMailboxOutput(mailbox=new_mailbox, activation=activation)
if not send_email:
LOG.i(f"Skipping sending validation email for mailbox {new_mailbox}")
return output
send_verification_email(
user,
new_mailbox,
activation=activation,
send_link=send_link,
)
return output
def delete_mailbox(
user: User, mailbox_id: int, transfer_mailbox_id: Optional[int]
) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
LOG.i(
f"User {user} has tried to delete another user's mailbox with {mailbox_id}"
)
raise MailboxError("Invalid mailbox")
if mailbox.id == user.default_mailbox_id:
LOG.i(f"User {user} has tried to delete the default mailbox")
raise MailboxError("Cannot delete your default mailbox")
if transfer_mailbox_id and transfer_mailbox_id > 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
if not transfer_mailbox or transfer_mailbox.user_id != user.id:
LOG.i(
f"User {user} has tried to transfer to a mailbox owned by another user"
)
raise MailboxError("You must transfer the aliases to a mailbox you own")
if transfer_mailbox.id == mailbox.id:
LOG.i(
f"User {user} has tried to transfer to the same mailbox he is deleting"
)
raise MailboxError(
"You can not transfer the aliases to the mailbox you want to delete"
)
if not transfer_mailbox.verified:
LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
MailboxError("Your new mailbox is not verified")
# Schedule delete account job
LOG.i(
f"User {user} has scheduled delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id
if transfer_mailbox_id and transfer_mailbox_id > 0
else None,
},
run_at=arrow.now(),
commit=True,
)
return mailbox
def clear_activation_codes_for_mailbox(mailbox: Mailbox):
Session.query(MailboxActivation).filter(
MailboxActivation.mailbox_id == mailbox.id
).delete()
Session.commit()
def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
)
raise MailboxError("Invalid mailbox")
if mailbox.verified:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
)
clear_activation_codes_for_mailbox(mailbox)
return mailbox
if mailbox.user_id != user.id:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
)
raise MailboxError("Invalid mailbox")
activation = (
MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
.order_by(MailboxActivation.created_at.desc())
.first()
)
if not activation:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because there is no activation"
)
raise MailboxError("Invalid code")
if activation.tries >= MAX_ACTIVATION_TRIES:
LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
clear_activation_codes_for_mailbox(mailbox)
raise CannotVerifyError("Invalid activation code. Please request another code.")
if activation.created_at < arrow.now().shift(minutes=-15):
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
)
clear_activation_codes_for_mailbox(mailbox)
raise CannotVerifyError("Invalid activation code. Please request another code.")
if code != activation.code:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because code does not match"
)
activation.tries = activation.tries + 1
Session.commit()
raise CannotVerifyError("Invalid activation code")
LOG.i(f"User {user} has verified mailbox {mailbox_id}")
mailbox.verified = True
clear_activation_codes_for_mailbox(mailbox)
return mailbox
def generate_activation_code(
mailbox: Mailbox, use_digit_code: bool = False
) -> MailboxActivation:
clear_activation_codes_for_mailbox(mailbox)
if use_digit_code:
if config.MAILBOX_VERIFICATION_OVERRIDE_CODE:
code = config.MAILBOX_VERIFICATION_OVERRIDE_CODE
else:
code = "{:06d}".format(random.randint(1, 999999))
else:
code = secrets.token_urlsafe(16)
return MailboxActivation.create(
mailbox_id=mailbox.id,
code=code,
tries=0,
commit=True,
)
def send_verification_email(
user: User, mailbox: Mailbox, activation: MailboxActivation, send_link: bool = True
):
LOG.i(
f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
)
if send_link:
verification_url = (
config.URL
+ "/dashboard/mailbox_verify"
+ f"?mailbox_id={mailbox.id}&code={activation.code}"
)
else:
verification_url = None
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
code=activation.code,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
code=activation.code,
link=verification_url,
mailbox_email=mailbox.email,
),
)
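One detail worth flagging in the file above: the six-digit path draws from random.randint, which is not a cryptographic source (and randint(1, 999999) can never yield "000000"). A hedged alternative using secrets; this is a swapped-in technique, not what the file does:

import secrets

def generate_digit_code(length: int = 6) -> str:
    # secrets.choice draws from the OS CSPRNG, and every code
    # including "000000" is equally likely.
    return "".join(secrets.choice("0123456789") for _ in range(length))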

View File

@ -263,15 +263,6 @@ class UnsubscribeBehaviourEnum(EnumE):
PreserveOriginal = 2
class AliasDeleteReason(EnumE):
Unspecified = 0
UserHasBeenDeleted = 1
ManualAction = 2
DirectoryDeleted = 3
MailboxDeleted = 4
CustomDomainDeleted = 5
class IntEnumType(sa.types.TypeDecorator):
impl = sa.Integer
@ -336,10 +327,9 @@ class Fido(Base, ModelMixin):
class User(Base, ModelMixin, UserMixin, PasswordOracle):
__tablename__ = "users"
FLAG_DISABLE_CREATE_CONTACTS = 1 << 0
FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
FLAG_CREATED_FROM_PARTNER = 1 << 1
FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
email = sa.Column(sa.String(256), unique=True, nullable=False)
@ -535,15 +525,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
sa.Boolean, default=True, nullable=False, server_default="1"
)
# user opted in for data breach check
enable_data_breach_check = sa.Column(
sa.Boolean, default=False, nullable=False, server_default="0"
)
# bitwise flags. Allow for future expansion
flags = sa.Column(
sa.BigInteger,
default=FLAG_DISABLE_CREATE_CONTACTS,
default=FLAG_FREE_DISABLE_CREATE_ALIAS,
server_default="0",
nullable=False,
)
@ -667,27 +652,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return user
@classmethod
def delete(cls, obj_id, commit=False):
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserDeleted, EventContent
user: User = cls.get(obj_id)
EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()))
# Manually delete all aliases for the user that is about to be deleted
from app.alias_utils import delete_alias
for alias in Alias.filter_by(user_id=user.id):
delete_alias(alias, user, AliasDeleteReason.UserHasBeenDeleted)
res = super(User, cls).delete(obj_id)
if commit:
Session.commit()
return res
def get_active_subscription(
self, include_partner_subscription: bool = True
) -> Optional[
@ -973,7 +937,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
def has_custom_domain(self):
return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0
def custom_domains(self) -> List["CustomDomain"]:
def custom_domains(self):
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
def available_domains_for_random_alias(
@ -985,8 +949,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
- the domain
"""
res = []
for domain in self.get_sl_domains(alias_options=alias_options):
res.append((True, domain.domain))
for domain in self.available_sl_domains(alias_options=alias_options):
res.append((True, domain))
for custom_domain in self.verified_custom_domains():
res.append((False, custom_domain.domain))
@ -1128,10 +1092,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
- Verified custom domains
"""
domains = [
sl_domain.domain
for sl_domain in self.get_sl_domains(alias_options=alias_options)
]
domains = self.available_sl_domains(alias_options=alias_options)
for custom_domain in self.verified_custom_domains():
domains.append(custom_domain.domain)
@ -1168,17 +1129,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
def can_create_contacts(self) -> bool:
if self.is_premium():
return True
if self.flags & User.FLAG_DISABLE_CREATE_CONTACTS == 0:
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
def has_used_alias_from_partner(self) -> bool:
return (
self.flags
& (User.FLAG_CREATED_ALIAS_FROM_PARTNER | User.FLAG_CREATED_FROM_PARTNER)
> 0
)
def __repr__(self):
return f"<User {self.id} {self.name} {self.email}>"
@ -1468,9 +1422,6 @@ def generate_random_alias_email(
class Alias(Base, ModelMixin):
__tablename__ = "alias"
FLAG_PARTNER_CREATED = 1 << 0
user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
)
@ -1480,9 +1431,6 @@ class Alias(Base, ModelMixin):
name = sa.Column(sa.String(128), nullable=True, default=None)
enabled = sa.Column(sa.Boolean(), default=True, nullable=False)
flags = sa.Column(
sa.BigInteger(), default=0, server_default="0", nullable=False, index=True
)
custom_domain_id = sa.Column(
sa.ForeignKey("custom_domain.id", ondelete="cascade"), nullable=True, index=True
@ -1660,31 +1608,12 @@ class Alias(Base, ModelMixin):
Session.add(new_alias)
DailyMetric.get_or_create_today_metric().nb_alias += 1
if (
new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
):
user.flags = user.flags | User.FLAG_CREATED_ALIAS_FROM_PARTNER
if commit:
Session.commit()
if flush:
Session.flush()
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import AliasCreated, EventContent
event = AliasCreated(
id=new_alias.id,
email=new_alias.email,
note=new_alias.note,
enabled=True,
created_at=int(new_alias.created_at.timestamp),
)
EventDispatcher.send_event(user, EventContent(alias_created=event))
return new_alias
@classmethod
@ -1863,8 +1792,6 @@ class Contact(Base, ModelMixin):
MAX_NAME_LENGTH = 512
FLAG_PARTNER_CREATED = 1 << 0
__tablename__ = "contact"
__table_args__ = (
@ -1923,9 +1850,6 @@ class Contact(Base, ModelMixin):
# whether contact is created automatically during the forward phase
automatic_created = sa.Column(sa.Boolean, nullable=True, default=False)
# contact flags
flags = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
@property
def email(self):
return self.website_email
@ -2285,12 +2209,6 @@ class DeletedAlias(Base, ModelMixin):
__tablename__ = "deleted_alias"
email = sa.Column(sa.String(256), unique=True, nullable=False)
reason = sa.Column(
IntEnumType(AliasDeleteReason),
nullable=False,
default=AliasDeleteReason.Unspecified,
server_default=str(AliasDeleteReason.Unspecified.value),
)
@classmethod
def create(cls, **kw):
@ -2424,18 +2342,6 @@ class CustomDomain(Base, ModelMixin):
sa.Boolean, nullable=False, default=False, server_default="0"
)
partner_id = sa.Column(
sa.Integer,
sa.ForeignKey("partner.id"),
nullable=True,
default=None,
server_default=None,
)
pending_deletion = sa.Column(
sa.Boolean, nullable=False, default=False, server_default="0"
)
__table_args__ = (
Index(
"ix_unique_domain", # Index name
@ -2443,8 +2349,6 @@ class CustomDomain(Base, ModelMixin):
unique=True,
postgresql_where=Column("ownership_verified"),
), # The condition
Index("ix_custom_domain_user_id", "user_id"),
Index("ix_custom_domain_pending_deletion", "pending_deletion"),
)
user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
@ -2462,6 +2366,9 @@ class CustomDomain(Base, ModelMixin):
def get_trash_url(self):
return config.URL + f"/dashboard/domains/{self.id}/trash"
def get_ownership_dns_txt_value(self):
return f"sl-verification={self.ownership_txt_token}"
@classmethod
def create(cls, **kwargs):
domain = kwargs.get("domain")
@ -2489,13 +2396,6 @@ class CustomDomain(Base, ModelMixin):
if obj.is_sl_subdomain:
DeletedSubdomain.create(domain=obj.domain)
from app import alias_utils
for alias in Alias.filter_by(custom_domain_id=obj_id):
alias_utils.delete_alias(
alias, obj.user, AliasDeleteReason.CustomDomainDeleted
)
return super(CustomDomain, cls).delete(obj_id)
@property
@ -2503,7 +2403,7 @@ class CustomDomain(Base, ModelMixin):
return sorted(self._auto_create_rules, key=lambda rule: rule.order)
def __repr__(self):
return f"<Custom Domain {self.id} {self.domain}>"
return f"<Custom Domain {self.domain}>"
class AutoCreateRule(Base, ModelMixin):
@ -2568,12 +2468,6 @@ class DomainDeletedAlias(Base, ModelMixin):
domain = orm.relationship(CustomDomain)
user = orm.relationship(User, foreign_keys=[user_id])
reason = sa.Column(
IntEnumType(AliasDeleteReason),
nullable=False,
default=AliasDeleteReason.Unspecified,
server_default=str(AliasDeleteReason.Unspecified.value),
)
@classmethod
def create(cls, **kw):
@ -2665,7 +2559,7 @@ class Directory(Base, ModelMixin):
for alias in Alias.filter_by(directory_id=obj_id):
from app import alias_utils
alias_utils.delete_alias(alias, user, AliasDeleteReason.DirectoryDeleted)
alias_utils.delete_alias(alias, user)
DeletedDirectory.create(name=obj.name)
cls.filter(cls.id == obj_id).delete()
@ -2692,13 +2586,10 @@ class Job(Base, ModelMixin):
nullable=False,
server_default=str(JobState.ready.value),
default=JobState.ready.value,
index=True,
)
attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0)
taken_at = sa.Column(ArrowType, nullable=True)
__table_args__ = (Index("ix_state_run_at_taken_at", state, run_at, taken_at),)
def __repr__(self):
return f"<Job {self.id} {self.name} {self.payload}>"
@ -2744,15 +2635,10 @@ class Mailbox(Base, ModelMixin):
return False
def nb_alias(self):
alias_ids = set(
am.alias_id
for am in AliasMailbox.filter_by(mailbox_id=self.id).values(
AliasMailbox.alias_id
)
return (
AliasMailbox.filter_by(mailbox_id=self.id).count()
+ Alias.filter_by(mailbox_id=self.id).count()
)
for alias in Alias.filter_by(mailbox_id=self.id).values(Alias.id):
alias_ids.add(alias.id)
return len(alias_ids)
def is_proton(self) -> bool:
if (
@ -2766,9 +2652,9 @@ class Mailbox(Base, ModelMixin):
from app.email_utils import get_email_local_part
mx_domains = get_mx_domains(get_email_local_part(self.email))
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
# Proton is the first domain
if mx_domains and mx_domains[0].domain in (
if mx_domains and mx_domains[0][1] in (
"mail.protonmail.ch.",
"mailsec.protonmail.ch.",
):
@ -2793,7 +2679,7 @@ class Mailbox(Base, ModelMixin):
from app import alias_utils
# only put aliases that have mailbox as a single mailbox into trash
alias_utils.delete_alias(alias, user, AliasDeleteReason.MailboxDeleted)
alias_utils.delete_alias(alias, user)
Session.commit()
cls.filter(cls.id == obj_id).delete()
@ -2801,15 +2687,12 @@ class Mailbox(Base, ModelMixin):
@property
def aliases(self) -> [Alias]:
ret = dict(
(alias.id, alias) for alias in Alias.filter_by(mailbox_id=self.id).all()
)
ret = Alias.filter_by(mailbox_id=self.id).all()
for am in AliasMailbox.filter_by(mailbox_id=self.id):
if am.alias_id not in ret:
ret[am.alias_id] = am.alias
ret.append(am.alias)
return list(ret.values())
return ret
@classmethod
def create(cls, **kw):
@ -2821,16 +2704,6 @@ class Mailbox(Base, ModelMixin):
return f"<Mailbox {self.id} {self.email}>"
class MailboxActivation(Base, ModelMixin):
__tablename__ = "mailbox_activation"
mailbox_id = sa.Column(
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
)
code = sa.Column(sa.String(32), nullable=False, index=True)
tries = sa.Column(sa.Integer, default=0, nullable=False)
class AccountActivation(Base, ModelMixin):
"""contains code to activate the user account when they sign up on mobile"""
@ -3049,7 +2922,11 @@ class RecoveryCode(Base, ModelMixin):
@classmethod
def find_by_user_code(cls, user: User, code: str):
hashed_code = cls._hash_code(code)
return cls.get_by(user_id=user.id, code=hashed_code)
# TODO: Only return hashed codes once there aren't unhashed codes in the db.
found_code = cls.get_by(user_id=user.id, code=hashed_code)
if found_code:
return found_code
return cls.get_by(user_id=user.id, code=code)
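The fallback above keeps legacy plain-text codes working while newly stored codes are hashed. A sketch of one way _hash_code could look (an assumption, since the real implementation is not part of this diff):

import hashlib
import hmac

def hash_code(code: str, server_secret: bytes) -> str:
    # Keyed hash: a leaked table alone does not yield usable codes.
    return hmac.new(server_secret, code.encode("utf-8"), hashlib.sha3_224).hexdigest()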
@classmethod
def empty(cls, user):
@ -3060,9 +2937,7 @@ class RecoveryCode(Base, ModelMixin):
class Notification(Base, ModelMixin):
__tablename__ = "notification"
user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
)
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
message = sa.Column(sa.Text, nullable=False)
title = sa.Column(sa.String(512))
@ -3144,7 +3019,7 @@ class SLDomain(Base, ModelMixin):
)
def __repr__(self):
return f"<SLDomain {self.id} {self.domain} {'Premium' if self.premium_only else 'Free'}>"
return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"
class Monitoring(Base, ModelMixin):
@ -3303,20 +3178,6 @@ class TransactionalEmail(Base, ModelMixin):
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
@classmethod
def create(cls, **kw):
# whether to call Session.commit
commit = kw.pop("commit", False)
r = cls(**kw)
if not config.STORE_TRANSACTIONAL_EMAILS:
return r
Session.add(r)
if commit:
Session.commit()
return r
class Payout(Base, ModelMixin):
"""Referral payouts"""
@ -3514,7 +3375,6 @@ class AdminAuditLog(Base):
action=AuditLogActionEnum.stop_trial.value,
model="User",
model_id=user_id,
data={},
)
@classmethod
@ -3750,54 +3610,3 @@ class ApiToCookieToken(Base, ModelMixin):
code = secrets.token_urlsafe(32)
return super().create(code=code, **kwargs)
class SyncEvent(Base, ModelMixin):
"""This model holds the events that need to be sent to the webhook"""
__tablename__ = "sync_event"
content = sa.Column(sa.LargeBinary, unique=False, nullable=False)
taken_time = sa.Column(
ArrowType, default=None, nullable=True, server_default=None, index=True
)
retry_count = sa.Column(sa.Integer, default=0, nullable=False, server_default="0")
__table_args__ = (
sa.Index("ix_sync_event_created_at", "created_at"),
sa.Index("ix_sync_event_taken_time", "taken_time"),
)
def mark_as_taken(self) -> bool:
sql = """
UPDATE sync_event
SET taken_time = :taken_time
WHERE id = :sync_event_id
AND taken_time IS NULL
"""
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
res = Session.execute(sql, args)
Session.commit()
return res.rowcount > 0
@classmethod
def get_dead_letter(cls, older_than: Arrow, max_retries: int) -> [SyncEvent]:
return (
SyncEvent.filter(
(
(
SyncEvent.taken_time.isnot(None)
& (SyncEvent.taken_time < older_than)
)
| (
SyncEvent.taken_time.is_(None)
& (SyncEvent.created_at < older_than)
)
)
& (SyncEvent.retry_count < max_retries)
)
.order_by(SyncEvent.id)
.limit(100)
.all()
)
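A consumer sketch for the model above: mark_as_taken claims an event with a conditional UPDATE (the taken_time IS NULL guard), so concurrent workers cannot process the same row twice. process_one and send_to_webhook are hypothetical names:

def process_one(event: SyncEvent, send_to_webhook) -> bool:
    if not event.mark_as_taken():
        return False  # another worker claimed it first
    send_to_webhook(event.content)  # serialized protobuf Event bytes
    return True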

View File

@ -20,7 +20,7 @@ def final():
if form.validate_on_submit():
alias = Alias.get_by(email=form.email.data)
if alias and alias.user_id == current_user.id:
send_test_email_alias(current_user, alias.email)
send_test_email_alias(alias.email, current_user.name)
flash("An email is sent to your alias", "success")
return render_template(

View File

@ -1,13 +1,7 @@
from app.onboarding.base import onboarding_bp
from flask import render_template, url_for, redirect
from flask import render_template
@onboarding_bp.route("/", methods=["GET"])
def index():
# Do the redirect to ensure cookies are set because they are SameSite=lax/strict
return redirect(url_for("onboarding.setup"))
@onboarding_bp.route("/setup", methods=["GET"])
def setup():
return render_template("onboarding/setup.html")
return render_template("onboarding/index.html")

View File

@ -27,7 +27,6 @@ def failed_payment(sub: Subscription, subscription_id: str):
"SimpleLogin - your subscription has failed to be renewed",
render(
"transactional/subscription-cancel.txt",
user=user,
end_date=arrow.arrow.datetime.utcnow(),
),
)

View File

@ -2,11 +2,9 @@ from dataclasses import dataclass
from enum import Enum
from flask import url_for
from typing import Optional
import arrow
from app import config
from app.errors import LinkException
from app.models import User, Partner, Job
from app.models import User, Partner
from app.proton.proton_client import ProtonClient, ProtonUser
from app.account_linking import (
process_login_case,
@ -43,21 +41,12 @@ class ProtonCallbackHandler:
def __init__(self, proton_client: ProtonClient):
self.proton_client = proton_client
def _initial_alias_sync(self, user: User):
Job.create(
name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
payload={"user_id": user.id},
run_at=arrow.now(),
commit=True,
)
def handle_login(self, partner: Partner) -> ProtonCallbackResult:
try:
user = self.__get_partner_user()
if user is None:
return generate_account_not_allowed_to_log_in()
res = process_login_case(user, partner)
self._initial_alias_sync(res.user)
return ProtonCallbackResult(
redirect_to_login=False,
flash_message=None,
@ -86,7 +75,6 @@ class ProtonCallbackHandler:
if user is None:
return generate_account_not_allowed_to_log_in()
res = process_link_case(user, current_user, partner)
self._initial_alias_sync(res.user)
return ProtonCallbackResult(
redirect_to_login=False,
flash_message="Account successfully linked",

View File

@ -2,7 +2,6 @@ from newrelic import agent
from typing import Optional
from app.db import Session
from app.log import LOG
from app.errors import ProtonPartnerNotSetUp
from app.models import Partner, PartnerUser, User
@ -31,7 +30,6 @@ def perform_proton_account_unlink(current_user: User):
user_id=current_user.id, partner_id=proton_partner.id
)
if partner_user is not None:
LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
PartnerUser.delete(partner_user.id)
Session.commit()
agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})

View File

@ -30,9 +30,7 @@ def check_bucket_limit(
try:
value = lock_redis.incr(bucket_lock_name, bucket_seconds)
if value > max_hits:
LOG.i(
f"Rate limit hit for {lock_name} (bucket id {bucket_id}) -> {value}/{max_hits}"
)
LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
newrelic.agent.record_custom_event(
"BucketRateLimit",
{"lock_name": lock_name, "bucket_seconds": bucket_seconds},

View File

@ -5,9 +5,19 @@ from typing import Optional
import boto3
import requests
from app import config
from app.config import (
AWS_REGION,
BUCKET,
AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY,
LOCAL_FILE_UPLOAD,
UPLOAD_DIR,
URL,
AWS_ENDPOINT_URL,
)
from app.log import LOG
_s3_client = None
@ -15,12 +25,12 @@ def _get_s3client():
global _s3_client
if _s3_client is None:
args = {
"aws_access_key_id": config.AWS_ACCESS_KEY_ID,
"aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
"region_name": config.AWS_REGION,
"aws_access_key_id": AWS_ACCESS_KEY_ID,
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
"region_name": AWS_REGION,
}
if config.AWS_ENDPOINT_URL:
args["endpoint_url"] = config.AWS_ENDPOINT_URL
if AWS_ENDPOINT_URL:
args["endpoint_url"] = AWS_ENDPOINT_URL
_s3_client = boto3.client("s3", **args)
return _s3_client
@ -28,8 +38,8 @@ def _get_s3client():
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
bs.seek(0)
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, key)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, key)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
@ -37,7 +47,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
else:
_get_s3client().put_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=key,
Body=bs,
ContentType=content_type,
@ -47,8 +57,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
bs.seek(0)
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, path)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
@ -56,7 +66,7 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
else:
_get_s3client().put_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=path,
Body=bs,
# Support saving a remote file using Http header
@ -67,12 +77,12 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
def download_email(path: str) -> Optional[str]:
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, path)
with open(file_path, "rb") as f:
return f.read()
resp = _get_s3client().get_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=path,
)
if not resp or "Body" not in resp:
@ -86,30 +96,29 @@ def upload_from_url(url: str, upload_path):
def get_url(key: str, expires_in=3600) -> str:
if config.LOCAL_FILE_UPLOAD:
return config.URL + "/static/upload/" + key
if LOCAL_FILE_UPLOAD:
return URL + "/static/upload/" + key
else:
return _get_s3client().generate_presigned_url(
ExpiresIn=expires_in,
ClientMethod="get_object",
Params={"Bucket": config.BUCKET, "Key": key},
Params={"Bucket": BUCKET, "Key": key},
)
def delete(path: str):
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
os.remove(file_path)
if LOCAL_FILE_UPLOAD:
os.remove(os.path.join(UPLOAD_DIR, path))
else:
_get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
_get_s3client().delete_object(Bucket=BUCKET, Key=path)
def create_bucket_if_not_exists():
s3client = _get_s3client()
buckets = s3client.list_buckets()
for bucket in buckets["Buckets"]:
if bucket["Name"] == config.BUCKET:
if bucket["Name"] == BUCKET:
LOG.i("Bucket already exists")
return
s3client.create_bucket(Bucket=config.BUCKET)
LOG.i(f"Bucket {config.BUCKET} created")
s3client.create_bucket(Bucket=BUCKET)
LOG.i(f"Bucket {BUCKET} created")

View File

@ -87,7 +87,6 @@ class RedisSessionStore(SessionInterface):
httponly = self.get_cookie_httponly(app)
secure = self.get_cookie_secure(app)
expires = self.get_expiration_time(app, session)
samesite = self.get_cookie_samesite(app)
val = pickle.dumps(dict(session))
ttl = int(app.permanent_session_lifetime.total_seconds())
# Only 5 minutes for non-authenticated sessions.
@ -110,7 +109,6 @@ class RedisSessionStore(SessionInterface):
domain=domain,
path=path,
secure=secure,
samesite=samesite,
)

View File

@ -2,9 +2,6 @@ import requests
from requests import RequestException
from app import config
from app.db import Session
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User
@ -30,11 +27,7 @@ def execute_subscription_webhook(user: User):
LOG.i("Sent request to subscription update webhook successfully")
else:
LOG.i(
f"Request to webhook failed with status {response.status_code}: {response.text}"
f"Request to webhook failed with statue {response.status_code}: {response.text}"
)
except RequestException as e:
LOG.error(f"Subscription request exception: {e}")
event = UserPlanChanged(plan_end_time=sl_subscription_end)
EventDispatcher.send_event(user, EventContent(user_plan_change=event))
Session.commit()
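The webhook call above logs failures instead of raising, so subscription processing is never blocked by a flaky endpoint. A minimal sketch of the same pattern; the URL, payload, and timeout are assumptions:

import requests
from requests import RequestException

def notify_webhook(url: str, payload: dict) -> bool:
    try:
        response = requests.post(url, json=payload, timeout=2)
    except RequestException:
        return False  # caller logs and moves on
    return response.status_code == 200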

View File

@ -1,71 +0,0 @@
from typing import Optional
from app.db import Session
from app.log import LOG
from app.models import User, SLDomain, CustomDomain, Mailbox
class CannotSetAlias(Exception):
def __init__(self, msg: str):
self.msg = msg
class CannotSetMailbox(Exception):
def __init__(self, msg: str):
self.msg = msg
def set_default_alias_domain(user: User, domain_name: Optional[str]):
if not domain_name:
LOG.i(f"User {user} has set no domain as default domain")
user.default_alias_public_domain_id = None
user.default_alias_custom_domain_id = None
Session.flush()
return
sl_domain: SLDomain = SLDomain.get_by(domain=domain_name)
if sl_domain:
if sl_domain.hidden:
LOG.i(f"User {user} has tried to set up a hidden domain as default domain")
raise CannotSetAlias("Domain does not exist")
if sl_domain.premium_only and not user.is_premium():
LOG.i(f"User {user} has tried to set up a premium domain as default domain")
raise CannotSetAlias("You cannot use this domain")
LOG.i(f"User {user} has set public {sl_domain} as default domain")
user.default_alias_public_domain_id = sl_domain.id
user.default_alias_custom_domain_id = None
Session.flush()
return
custom_domain = CustomDomain.get_by(domain=domain_name)
if not custom_domain:
LOG.i(
f"User {user} has tried to set up an non existing domain as default domain"
)
raise CannotSetAlias("Domain does not exist or it hasn't been verified")
if custom_domain.user_id != user.id or not custom_domain.verified:
LOG.i(
f"User {user} has tried to set domain {custom_domain} as default domain that does not belong to the user or that is not verified"
)
raise CannotSetAlias("Domain does not exist or it hasn't been verified")
LOG.i(f"User {user} has set custom {custom_domain} as default domain")
user.default_alias_public_domain_id = None
user.default_alias_custom_domain_id = custom_domain.id
Session.flush()
def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
raise CannotSetMailbox("Invalid mailbox")
if not mailbox.verified:
raise CannotSetMailbox("This is mailbox is not verified")
if mailbox.id == user.default_mailbox_id:
return mailbox
LOG.i(f"User {user} has set mailbox {mailbox} as his default one")
user.default_mailbox_id = mailbox.id
Session.commit()
return mailbox
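# Usage sketch for the two setters above: a minimal caller, assuming a web
# handler wants to surface the exception messages to the user. The function
# name `update_default_domain` is illustrative, not part of this module; it
# relies on the User/Session/Optional imports already at the top of the file.
def update_default_domain(user: User, domain_name: Optional[str]) -> Optional[str]:
    """Return a user-facing error message, or None on success."""
    try:
        set_default_alias_domain(user, domain_name)
    except CannotSetAlias as e:
        return e.msg
    # set_default_alias_domain only flushes, so persist the change here
    Session.commit()
    return None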

View File

@ -14,7 +14,6 @@ from sqlalchemy.sql import Insert, text
from app import s3, config
from app.alias_utils import nb_email_log_for_mailbox
from app.api.views.apple import verify_receipt
from app.custom_domain_validation import CustomDomainValidation
from app.db import Session
from app.dns_utils import get_mx_domains, is_mx_equivalent
from app.email_utils import (
@ -62,9 +61,6 @@ from app.pgp_utils import load_public_key_and_check, PGPException
from app.proton.utils import get_proton_partner
from app.utils import sanitize_email
from server import create_light_app
from tasks.cleanup_old_imports import cleanup_old_imports
from tasks.cleanup_old_jobs import cleanup_old_jobs
from tasks.cleanup_old_notifications import cleanup_old_notifications
DELETE_GRACE_DAYS = 30
@ -267,13 +263,11 @@ def notify_manual_sub_end():
"Your SimpleLogin subscription will end soon",
render(
"transactional/coinbase/reminder-subscription.txt",
user=user,
coinbase_subscription=coinbase_subscription,
extend_subscription_url=extend_subscription_url,
),
render(
"transactional/coinbase/reminder-subscription.html",
user=user,
coinbase_subscription=coinbase_subscription,
extend_subscription_url=extend_subscription_url,
),
@ -829,12 +823,10 @@ def check_mailbox_valid_domain():
f"Mailbox {mailbox.email} is disabled",
render(
"transactional/disable-mailbox-warning.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
),
render(
"transactional/disable-mailbox-warning.html",
user=mailbox.user,
mailbox=mailbox,
),
retries=3,
@ -889,7 +881,6 @@ def check_mailbox_valid_pgp_keys():
f"Mailbox {mailbox.email}'s PGP Key is invalid",
render(
"transactional/invalid-mailbox-pgp-key.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
),
retries=3,
@ -906,11 +897,9 @@ def check_custom_domain():
LOG.i("custom domain has been deleted")
def check_single_custom_domain(custom_domain: CustomDomain):
def check_single_custom_domain(custom_domain):
mx_domains = get_mx_domains(custom_domain.domain)
validator = CustomDomainValidation(dkim_domain=config.EMAIL_DOMAIN)
expected_custom_domains = validator.get_expected_mx_records(custom_domain)
if not is_mx_equivalent(mx_domains, expected_custom_domains):
if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
user = custom_domain.user
LOG.w(
"The MX record is not correctly set for %s %s %s",
@ -932,7 +921,6 @@ def check_single_custom_domain(custom_domain: CustomDomain):
f"Please update {custom_domain.domain} DNS on SimpleLogin",
render(
"transactional/custom-domain-dns-issue.txt.jinja2",
user=user,
custom_domain=custom_domain,
domain_dns_url=domain_dns_url,
),
@ -974,9 +962,6 @@ async def _hibp_check(api_key, queue):
This function is meant to be run concurrently (multiple _hibp_check coroutines with different keys on the same queue) to make maximum use of multiple API keys.
"""
default_rate_sleep = (60.0 / config.HIBP_RPM) + 0.1
rate_sleep = default_rate_sleep
rate_hit_counter = 0
while True:
try:
alias_id = queue.get_nowait()
@ -984,14 +969,9 @@ async def _hibp_check(api_key, queue):
return
alias = Alias.get(alias_id)
# an alias can be deleted in the meantime
if not alias:
continue
user = alias.user
if user.disabled or not user.is_paid():
# Mark it as hibp done to skip it as if it had been checked
alias.hibp_last_check = arrow.utcnow()
Session.commit()
continue
return
LOG.d("Checking HIBP for %s", alias)
@ -1003,6 +983,7 @@ async def _hibp_check(api_key, queue):
f"https://haveibeenpwned.com/api/v3/breachedaccount/{urllib.parse.quote(alias.email)}",
headers=request_headers,
)
if r.status_code == 200:
# Breaches found
alias.hibp_breaches = [
@ -1010,27 +991,20 @@ async def _hibp_check(api_key, queue):
]
if len(alias.hibp_breaches) > 0:
LOG.w("%s appears in HIBP breaches %s", alias, alias.hibp_breaches)
if rate_hit_counter > 0:
rate_hit_counter -= 1
elif r.status_code == 404:
# No breaches found
alias.hibp_breaches = []
elif r.status_code == 429:
# rate limited
LOG.w("HIBP rate limited, check alias %s in the next run", alias)
rate_hit_counter += 1
rate_sleep = default_rate_sleep + (0.2 * rate_hit_counter)
if rate_hit_counter > 10:
LOG.w(f"HIBP rate limited too many times stopping with alias {alias}")
return
# Just sleep for a while
asyncio.sleep(5)
await asyncio.sleep(1.6)
return
elif r.status_code > 500:
LOG.w("HIBP server 5** error %s", r.status_code)
return
else:
LOG.error(
"An error occurred while checking alias %s: %s - %s",
"An error occured while checking alias %s: %s - %s",
alias,
r.status_code,
r.text,
@ -1041,63 +1015,9 @@ async def _hibp_check(api_key, queue):
Session.add(alias)
Session.commit()
LOG.d("Updated breach info for %s", alias)
await asyncio.sleep(rate_sleep)
LOG.d("Updated breaches info for %s", alias)
def get_alias_to_check_hibp(
oldest_hibp_allowed: arrow.Arrow,
user_ids_to_skip: list[int],
min_alias_id: int,
max_alias_id: int,
):
now = arrow.now()
alias_query = (
Session.query(Alias)
.join(User, User.id == Alias.user_id)
.join(Subscription, User.id == Subscription.user_id, isouter=True)
.join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
.join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
.join(
CoinbaseSubscription,
User.id == CoinbaseSubscription.user_id,
isouter=True,
)
.join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
.join(
PartnerSubscription,
PartnerSubscription.partner_user_id == PartnerUser.id,
isouter=True,
)
.filter(
or_(
Alias.hibp_last_check.is_(None),
Alias.hibp_last_check < oldest_hibp_allowed,
),
Alias.user_id.notin_(user_ids_to_skip),
Alias.enabled,
Alias.id >= min_alias_id,
Alias.id < max_alias_id,
User.disabled == False, # noqa: E712
User.enable_data_breach_check,
or_(
User.lifetime,
ManualSubscription.end_at > now,
Subscription.next_bill_date > now.date(),
AppleSubscription.expires_date > now,
CoinbaseSubscription.end_at > now,
PartnerSubscription.end_at > now,
),
)
)
if config.HIBP_SKIP_PARTNER_ALIAS:
alias_query = alias_query.filter(
Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
)
for alias in (
alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
):
yield alias
await asyncio.sleep(1.6)
async def check_hibp():
@ -1120,49 +1040,40 @@ async def check_hibp():
Session.commit()
LOG.d("Updated list of known breaches")
LOG.d("Getting the list of users to skip")
query = "select u.id, count(a.id) from users u, alias a where a.user_id=u.id group by u.id having count(a.id) > :max_alias"
rows = Session.execute(query, {"max_alias": config.HIBP_MAX_ALIAS_CHECK})
user_ids = [row[0] for row in rows]
LOG.d("Got %d users to skip" % len(user_ids))
LOG.d("Checking aliases")
LOG.d("Preparing list of aliases to check")
queue = asyncio.Queue()
min_alias_id = 0
max_alias_id = Session.query(func.max(Alias.id)).scalar()
step = 10000
now = arrow.now()
oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
alias_checked = 0
for alias_batch_id in range(min_alias_id, max_alias_id, step):
for alias in get_alias_to_check_hibp(
oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
):
await queue.put(alias.id)
alias_checked += queue.qsize()
LOG.d(
f"Need to check about {queue.qsize()} aliases in this loop {alias_batch_id}/{max_alias_id}"
max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
for alias in (
Alias.filter(
or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date)
)
.filter(Alias.enabled)
.order_by(Alias.hibp_last_check.asc())
.yield_per(500)
.enable_eagerloads(False)
):
await queue.put(alias.id)
# Start one checking process per API key
# Each checking process will take one alias from the queue, get the info
# and then sleep for 1.5 seconds (due to HIBP API request limits)
checkers = []
for i in range(len(config.HIBP_API_KEYS)):
checker = asyncio.create_task(
_hibp_check(
config.HIBP_API_KEYS[i],
queue,
)
LOG.d("Need to check about %s aliases", queue.qsize())
# Start one checking process per API key
# Each checking process will take one alias from the queue, get the info
# and then sleep for 1.5 seconds (due to HIBP API request limits)
checkers = []
for i in range(len(config.HIBP_API_KEYS)):
checker = asyncio.create_task(
_hibp_check(
config.HIBP_API_KEYS[i],
queue,
)
checkers.append(checker)
)
checkers.append(checker)
# Wait until all checking processes are done
for checker in checkers:
await checker
# Wait until all checking processes are done
for checker in checkers:
await checker
LOG.d(f"Done checking {alias_checked} HIBP API for aliases in breaches")
LOG.d("Done checking HIBP API for aliases in breaches")
def notify_hibp():
@ -1234,13 +1145,6 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
Session.commit()
def delete_old_data():
oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
cleanup_old_imports(oldest_valid)
cleanup_old_jobs(oldest_valid)
cleanup_old_notifications(oldest_valid)
if __name__ == "__main__":
LOG.d("Start running cronjob")
parser = argparse.ArgumentParser()
@ -1255,7 +1159,6 @@ if __name__ == "__main__":
"notify_manual_subscription_end",
"notify_premium_end",
"delete_logs",
"delete_old_data",
"poll_apple_subscription",
"sanity_check",
"delete_old_monitoring",
@ -1284,9 +1187,6 @@ if __name__ == "__main__":
elif args.job == "delete_logs":
LOG.d("Deleted Logs")
delete_logs()
elif args.job == "delete_old_data":
LOG.d("Delete old data")
delete_old_data()
elif args.job == "poll_apple_subscription":
LOG.d("Poll Apple Subscriptions")
poll_apple_subscription()

View File

@ -37,12 +37,6 @@ jobs:
schedule: "15 5 * * *"
captureStderr: true
- name: SimpleLogin Delete Old data
command: python /code/cron.py -j delete_old_data
shell: /bin/bash
schedule: "30 5 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash

View File

@ -52,12 +52,8 @@ from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from sqlalchemy.exc import IntegrityError
from app import pgp_utils, s3, config, contact_utils
from app.alias_utils import (
try_auto_create,
change_alias_status,
get_alias_recipient_name,
)
from app import pgp_utils, s3, config
from app.alias_utils import try_auto_create
from app.config import (
EMAIL_DOMAIN,
URL,
@ -199,16 +195,76 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
mail_from,
)
contact_email = mail_from
contact_result = contact_utils.create_contact(
email=contact_email,
alias=alias,
name=contact_name,
mail_from=mail_from,
allow_empty_email=True,
automatic_created=True,
from_partner=False,
)
return contact_result.contact
if not is_valid_email(contact_email):
LOG.w(
"invalid contact email %s. Parse from %s %s",
contact_email,
from_header,
mail_from,
)
# either reuse a contact with empty email or create a new contact with empty email
contact_email = ""
contact_email = sanitize_email(contact_email, not_lower=True)
if contact_name and "\x00" in contact_name:
LOG.w("issue with contact name %s", contact_name)
contact_name = ""
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
if contact:
if contact.name != contact_name:
LOG.d(
"Update contact %s name %s to %s",
contact,
contact.name,
contact_name,
)
contact.name = contact_name
Session.commit()
# a contact created in the past does not have the mail_from and from_header fields
if not contact.mail_from and mail_from:
LOG.d(
"Set contact mail_from %s: %s to %s",
contact,
contact.mail_from,
mail_from,
)
contact.mail_from = mail_from
Session.commit()
else:
try:
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
mail_from=mail_from,
reply_email=generate_reply_email(contact_email, alias)
if is_valid_email(contact_email)
else NOREPLY,
automatic_created=True,
)
if not contact_email:
LOG.d("Create a contact with invalid email for %s", alias)
contact.invalid_email = True
LOG.d(
"create contact %s for %s, reverse alias:%s",
contact_email,
alias,
contact.reply_email,
)
Session.commit()
except IntegrityError:
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
return contact
def get_or_create_reply_to_contact(
@ -233,7 +289,33 @@ def get_or_create_reply_to_contact(
)
return None
return contact_utils.create_contact(contact_address, alias, contact_name).contact
contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
if contact:
return contact
else:
LOG.d(
"create contact %s for alias %s via reply-to header %s",
contact_address,
alias,
reply_to_header,
)
try:
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_address,
name=contact_name,
reply_email=generate_reply_email(contact_address, alias),
automatic_created=True,
)
Session.commit()
except IntegrityError:
LOG.w("Contact %s %s already exist", alias, contact_address)
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
return contact
def replace_header_when_forward(msg: Message, alias: Alias, header: str):
@ -518,14 +600,12 @@ def handle_email_sent_to_ourself(alias, from_addr: str, msg: Message, user):
f"Email sent to {alias.email} from its own mailbox {from_addr}",
render(
"transactional/cycle-email.txt.jinja2",
user=user,
alias=alias,
from_addr=from_addr,
refused_email_url=refused_email_url,
),
render(
"transactional/cycle-email.html",
user=user,
alias=alias,
from_addr=from_addr,
refused_email_url=refused_email_url,
@ -579,9 +659,6 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
from_header = get_header_unicode(msg[headers.FROM])
LOG.d("Create or get contact for from_header:%s", from_header)
contact = get_or_create_contact(from_header, envelope.mail_from, alias)
alias = (
contact.alias
) # In case the Session was closed in the get_or_create we re-fetch the alias
reply_to_contact = None
if msg[headers.REPLY_TO]:
@ -650,14 +727,12 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
f"Your mailbox {mailbox.email} is an alias",
render(
"transactional/mailbox-invalid.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
),
render(
"transactional/mailbox-invalid.html",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
@ -710,14 +785,12 @@ def forward_email_to_mailbox(
f"Your mailbox {mailbox.email} and alias {alias.email} use the same domain",
render(
"transactional/mailbox-invalid.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
),
render(
"transactional/mailbox-invalid.html",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
@ -731,7 +804,7 @@ def forward_email_to_mailbox(
email_log = EmailLog.create(
contact_id=contact.id,
user_id=contact.user_id,
user_id=user.id,
mailbox_id=mailbox.id,
alias_id=contact.alias_id,
message_id=str(msg[headers.MESSAGE_ID]),
@ -801,7 +874,6 @@ def forward_email_to_mailbox(
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
headers.LIST_UNSUBSCRIBE,
headers.LIST_UNSUBSCRIBE_POST,
] + headers.MIME_HEADERS
@ -1106,7 +1178,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
]
+ headers.MIME_HEADERS,
)
@ -1165,11 +1236,23 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
Session.commit()
recipient_name = get_alias_recipient_name(alias)
if recipient_name.message:
LOG.d(recipient_name.message)
LOG.d("From header is %s", recipient_name.name)
add_or_replace_header(msg, headers.FROM, recipient_name.name)
# make the email come from the alias
from_header = alias.email
# add alias name from alias
if alias.name:
LOG.d("Put alias name %s in from header", alias.name)
from_header = sl_formataddr((alias.name, alias.email))
elif alias.custom_domain:
# add alias name from domain
if alias.custom_domain.name:
LOG.d(
"Put domain default alias name %s in from header",
alias.custom_domain.name,
)
from_header = sl_formataddr((alias.custom_domain.name, alias.email))
LOG.d("From header is %s", from_header)
add_or_replace_header(msg, headers.FROM, from_header)
try:
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
@ -1190,7 +1273,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
f"Email sent to {contact.email} contains non reverse-alias addresses",
render(
"transactional/non-reverse-alias-reply-phase.txt.jinja2",
user=alias.user,
destination=contact.email,
alias=alias.email,
subject=msg[headers.SUBJECT],
@ -1412,7 +1494,6 @@ def handle_unknown_mailbox(
f"Attempt to use your alias {alias.email} from {envelope.mail_from}",
render(
"transactional/reply-must-use-personal-email.txt",
user=user,
alias=alias,
sender=envelope.mail_from,
authorize_address_link=authorize_address_link,
@ -1420,7 +1501,6 @@ def handle_unknown_mailbox(
),
render(
"transactional/reply-must-use-personal-email.html",
user=user,
alias=alias,
sender=envelope.mail_from,
authorize_address_link=authorize_address_link,
@ -1502,7 +1582,7 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
LOG.w(
f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
)
change_alias_status(alias, enabled=False)
alias.enabled = False
Notification.create(
user_id=user.id,
@ -1521,14 +1601,12 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
f"Alias {alias.email} has been disabled due to multiple bounces",
render(
"transactional/bounce/automatic-disable-alias.txt",
user=alias.user,
alias=alias,
refused_email_url=refused_email_url,
mailbox_email=mailbox.email,
),
render(
"transactional/bounce/automatic-disable-alias.html",
user=alias.user,
alias=alias,
refused_email_url=refused_email_url,
mailbox_email=mailbox.email,
@ -1567,7 +1645,6 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
f"An email sent to {alias.email} cannot be delivered to your mailbox",
render(
"transactional/bounce/bounced-email.txt.jinja2",
user=alias.user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1577,7 +1654,6 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
),
render(
"transactional/bounce/bounced-email.html",
user=alias.user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1670,14 +1746,12 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
f"Email cannot be sent to { contact.email } from your alias { alias.email }",
render(
"transactional/bounce/bounce-email-reply-phase.txt",
user=user,
alias=alias,
contact=contact,
refused_email_url=refused_email_url,
),
render(
"transactional/bounce/bounce-email-reply-phase.html",
user=user,
alias=alias,
contact=contact,
refused_email_url=refused_email_url,
@ -1740,7 +1814,6 @@ def handle_spam(
f"Email from {alias.email} to {contact.website_email} is detected as spam",
render(
"transactional/spam-email-reply-phase.txt",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1748,7 +1821,6 @@ def handle_spam(
),
render(
"transactional/spam-email-reply-phase.html",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1771,7 +1843,6 @@ def handle_spam(
f"Email from {contact.website_email} to {alias.email} is detected as spam",
render(
"transactional/spam-email.txt",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1779,7 +1850,6 @@ def handle_spam(
),
render(
"transactional/spam-email.html",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1820,30 +1890,24 @@ def handle_transactional_bounce(
envelope: Envelope, msg, rcpt_to, transactional_id=None
):
LOG.d("handle transactional bounce sent to %s", rcpt_to)
if transactional_id is None:
LOG.i(
f"No transactional record for {envelope.mail_from} -> {envelope.rcpt_tos}"
)
return
# parse the TransactionalEmail
transactional_id = transactional_id or parse_id_from_bounce(rcpt_to)
transactional = TransactionalEmail.get(transactional_id)
# a transactional email might have been deleted in delete_logs()
if not transactional:
LOG.i(
f"No transactional record for {envelope.mail_from} -> {envelope.rcpt_tos}"
)
return
LOG.i("Create bounce for %s", transactional.email)
bounce_info = get_mailbox_bounce_info(msg)
if bounce_info:
Bounce.create(
email=transactional.email,
info=bounce_info.as_bytes().decode(),
commit=True,
)
else:
LOG.w("cannot get bounce info, debug at %s", save_email_for_debugging(msg))
Bounce.create(email=transactional.email, commit=True)
if transactional:
LOG.i("Create bounce for %s", transactional.email)
bounce_info = get_mailbox_bounce_info(msg)
if bounce_info:
Bounce.create(
email=transactional.email,
info=bounce_info.as_bytes().decode(),
commit=True,
)
else:
LOG.w("cannot get bounce info, debug at %s", save_email_for_debugging(msg))
Bounce.create(email=transactional.email, commit=True)
def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
@ -1936,7 +2000,7 @@ def send_no_reply_response(mail_from: str, msg: Message):
ALERT_TO_NOREPLY,
mailbox.user.email,
"Auto: {}".format(msg[headers.SUBJECT] or "No subject"),
render("transactional/noreply.text.jinja2", user=mailbox.user),
render("transactional/noreply.text.jinja2"),
)
@ -1969,11 +2033,10 @@ def handle(envelope: Envelope, msg: Message) -> str:
return status.E204
# sanitize email headers
sanitize_header(msg, headers.FROM)
sanitize_header(msg, headers.TO)
sanitize_header(msg, headers.CC)
sanitize_header(msg, headers.REPLY_TO)
sanitize_header(msg, headers.MESSAGE_ID)
sanitize_header(msg, "from")
sanitize_header(msg, "to")
sanitize_header(msg, "cc")
sanitize_header(msg, "reply-to")
LOG.d(
"==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "
@ -2018,7 +2081,6 @@ def handle(envelope: Envelope, msg: Message) -> str:
"SimpleLogin shouldn't be used with another email forwarding system",
render(
"transactional/email-sent-from-reverse-alias.txt.jinja2",
user=user,
),
)

View File

@ -1,113 +0,0 @@
import argparse
from enum import Enum
from sys import argv, exit
from app.config import EVENT_LISTENER_DB_URI
from app.log import LOG
from events import event_debugger
from events.runner import Runner
from events.event_source import DeadLetterEventSource, PostgresEventSource
from events.event_sink import ConsoleEventSink, HttpEventSink
_DEFAULT_MAX_RETRIES = 10
class Mode(Enum):
DEAD_LETTER = "dead_letter"
LISTENER = "listener"
@staticmethod
def from_str(value: str):
if value == Mode.DEAD_LETTER.value:
return Mode.DEAD_LETTER
elif value == Mode.LISTENER.value:
return Mode.LISTENER
else:
raise ValueError(f"Invalid mode: {value}")
def main(mode: Mode, dry_run: bool, max_retries: int):
if mode == Mode.DEAD_LETTER:
LOG.i("Using DeadLetterEventSource")
source = DeadLetterEventSource(max_retries)
elif mode == Mode.LISTENER:
LOG.i("Using PostgresEventSource")
source = PostgresEventSource(EVENT_LISTENER_DB_URI)
else:
raise ValueError(f"Invalid mode: {mode}")
if dry_run:
LOG.i("Starting with ConsoleEventSink")
sink = ConsoleEventSink()
else:
LOG.i("Starting with HttpEventSink")
sink = HttpEventSink()
runner = Runner(source=source, sink=sink)
runner.run()
def debug_event(event_id: str):
LOG.i(f"Debugging event {event_id}")
try:
event_id_int = int(event_id)
except ValueError:
raise ValueError(f"Invalid event id: {event_id}")
event_debugger.debug_event(event_id_int)
def run_event(event_id: str, delete_on_success: bool):
LOG.i(f"Running event {event_id}")
try:
event_id_int = int(event_id)
except ValueError:
raise ValueError(f"Invalid event id: {event_id}")
event_debugger.run_event(event_id_int, delete_on_success)
def args():
parser = argparse.ArgumentParser(description="Run event listener")
subparsers = parser.add_subparsers(dest="command")
listener_parser = subparsers.add_parser(Mode.LISTENER.value)
listener_parser.add_argument(
"--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
)
listener_parser.add_argument("--dry-run", action="store_true")
dead_letter_parser = subparsers.add_parser(Mode.DEAD_LETTER.value)
dead_letter_parser.add_argument(
"--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
)
dead_letter_parser.add_argument("--dry-run", action="store_true")
debug_parser = subparsers.add_parser("debug")
debug_parser.add_argument("event_id", help="ID of the event to debug")
run_parser = subparsers.add_parser("run")
run_parser.add_argument("event_id", help="ID of the event to run")
run_parser.add_argument("--delete-on-success", action="store_true")
return parser.parse_args()
if __name__ == "__main__":
if len(argv) < 2:
print("Invalid usage. Pass a valid subcommand as argument")
exit(1)
args = args()
if args.command in [Mode.LISTENER.value, Mode.DEAD_LETTER.value]:
main(
mode=Mode.from_str(args.command),
dry_run=args.dry_run,
max_retries=args.max_retries,
)
elif args.command == "debug":
debug_event(args.event_id)
elif args.command == "run":
run_event(args.event_id, args.delete_on_success)
else:
print("Invalid command")
exit(1)
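# Example invocations, based on the argparse setup above (the module path
# event_listener.py is illustrative):
#   python event_listener.py listener --max-retries 10
#   python event_listener.py dead_letter --dry-run
#   python event_listener.py debug <event_id>
#   python event_listener.py run <event_id> --delete-on-success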

View File

View File

@ -1,43 +0,0 @@
from app.events.generated import event_pb2
from app.models import SyncEvent
from events.event_sink import HttpEventSink
def debug_event(event_id: int):
event = SyncEvent.get_by(id=event_id)
if not event:
print("Event not found")
return
print(f"Info for event {event_id}")
print(f"- Created at: {event.created_at}")
print(f"- Updated at: {event.updated_at}")
print(f"- Taken time: {event.taken_time}")
print(f"- Retry count: {event.retry_count}")
print()
print("Event contents")
event_contents = event.content
parsed = event_pb2.Event.FromString(event_contents)
print(f"- UserID: {parsed.user_id}")
print(f"- ExternalUserID: {parsed.external_user_id}")
print(f"- PartnerID: {parsed.partner_id}")
content = parsed.content
print(f"Content: {content}")
def run_event(event_id: int, delete_on_success: bool = True):
event = SyncEvent.get_by(id=event_id)
if not event:
print("Event not found")
return
print(f"Processing event {event_id}")
sink = HttpEventSink()
res = sink.process(event)
if res:
print(f"Processed event {event_id}")
if delete_on_success:
SyncEvent.delete(event_id, commit=True)

View File

@ -1,46 +0,0 @@
import requests
import newrelic.agent
from abc import ABC, abstractmethod
from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
from app.log import LOG
from app.models import SyncEvent
class EventSink(ABC):
@abstractmethod
def process(self, event: SyncEvent) -> bool:
pass
class HttpEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
if not EVENT_WEBHOOK:
LOG.warning("Skipping sending event because there is no webhook configured")
return False
LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")
res = requests.post(
url=EVENT_WEBHOOK,
data=event.content,
headers={"Content-Type": "application/x-protobuf"},
verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
)
newrelic.agent.record_custom_event(
"EventSentToPartner", {"http_code": res.status_code}
)
if res.status_code != 200:
LOG.warning(
f"Failed to send event to webhook: {res.status_code} {res.text}"
)
return False
else:
LOG.info(f"Event {event.id} sent successfully to webhook")
return True
class ConsoleEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
LOG.info(f"Handling event {event.id}")
return True

View File

@ -1,112 +0,0 @@
import arrow
import newrelic.agent
import psycopg2
import select
from abc import ABC, abstractmethod
from app.db import Session
from app.log import LOG
from app.models import SyncEvent
from app.events.event_dispatcher import NOTIFICATION_CHANNEL
from time import sleep
from typing import Callable, NoReturn
_DEAD_LETTER_THRESHOLD_MINUTES = 10
_DEAD_LETTER_INTERVAL_SECONDS = 30
_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5
class EventSource(ABC):
@abstractmethod
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
pass
class PostgresEventSource(EventSource):
def __init__(self, connection_string: str):
self.__connection_string = connection_string
self.__connect()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
self.__listen(on_event)
except Exception as e:
LOG.warn(f"Error listening to events: {e}")
sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
self.__connect()
def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
self.__connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
)
cursor = self.__connection.cursor()
cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")
LOG.info("Starting to listen to events")
while True:
if select.select([self.__connection], [], [], 5) != ([], [], []):
self.__connection.poll()
while self.__connection.notifies:
notify = self.__connection.notifies.pop(0)
LOG.debug(
f"Got NOTIFY: pid={notify.pid} channel={notify.channel} payload={notify.payload}"
)
try:
webhook_id = int(notify.payload)
event = SyncEvent.get_by(id=webhook_id)
if event is not None:
if event.mark_as_taken():
on_event(event)
else:
LOG.info(
f"Event {event.id} was handled by another runner"
)
else:
LOG.info(f"Could not find event with id={notify.payload}")
except Exception as e:
LOG.warn(f"Error getting event: {e}")
Session.close() # Ensure we get a new connection and we don't leave a dangling tx
def __connect(self):
self.__connection = psycopg2.connect(
self.__connection_string, application_name="sl-event-listen"
)
from app.db import Session
Session.close()
class DeadLetterEventSource(EventSource):
def __init__(self, max_retries: int):
self.__max_retries = max_retries
@newrelic.agent.background_task()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
threshold = arrow.utcnow().shift(
minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
)
events = SyncEvent.get_dead_letter(
older_than=threshold, max_retries=self.__max_retries
)
if events:
LOG.info(f"Got {len(events)} dead letter events")
if events:
newrelic.agent.record_custom_metric(
"Custom/dead_letter_events_to_process", len(events)
)
for event in events:
on_event(event)
Session.close() # Ensure that we have a new connection and we don't have a dangling tx with a lock
if not events:
LOG.debug("No dead letter events")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
except Exception as e:
LOG.warn(f"Error getting dead letter event: {e}")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
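# Wiring sketch (illustrative): replay dead-letter events through the
# console sink without touching the webhook, mirroring main() in the
# listener entrypoint with mode=dead_letter and --dry-run:
#
#   from events.runner import Runner
#   from events.event_sink import ConsoleEventSink
#
#   runner = Runner(source=DeadLetterEventSource(max_retries=10),
#                   sink=ConsoleEventSink())
#   runner.run()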

View File

@ -1,46 +0,0 @@
import arrow
import newrelic.agent
from app.log import LOG
from app.db import Session
from app.models import SyncEvent
from events.event_sink import EventSink
from events.event_source import EventSource
class Runner:
def __init__(self, source: EventSource, sink: EventSink):
self.__source = source
self.__sink = sink
def run(self):
self.__source.run(self.__on_event)
@newrelic.agent.background_task()
def __on_event(self, event: SyncEvent):
try:
event_created_at = event.created_at
start_time = arrow.now()
success = self.__sink.process(event)
if success:
event_id = event.id
SyncEvent.delete(event.id, commit=True)
LOG.info(f"Marked {event_id} as done")
end_time = arrow.now() - start_time
time_between_taken_and_created = start_time - event_created_at
newrelic.agent.record_custom_metric("Custom/sync_event_processed", 1)
newrelic.agent.record_custom_metric(
"Custom/sync_event_process_time", end_time.total_seconds()
)
newrelic.agent.record_custom_metric(
"Custom/sync_event_elapsed_time",
time_between_taken_and_created.total_seconds(),
)
else:
event.retry_count = event.retry_count + 1
Session.commit()
except Exception as e:
LOG.warn(f"Exception processing event [id={event.id}]: {e}")
newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)

View File

@ -116,14 +116,6 @@ WORDS_FILE_PATH=local_data/test_words.txt
# CONNECT_WITH_PROTON=true
# CONNECT_WITH_PROTON_COOKIE_NAME=to_fill
# Login with OIDC
# CONNECT_WITH_OIDC_ICON=fa-github
# OIDC_WELL_KNOWN_URL=to_fill
# OIDC_SCOPES=openid email profile
# OIDC_NAME_FIELD=name
# OIDC_CLIENT_ID=to_fill
# OIDC_CLIENT_SECRET=to_fill
# Flask profiler
# FLASK_PROFILER_PATH=/tmp/flask-profiler.sql
# FLASK_PROFILER_PASSWORD=password

View File

@ -3,7 +3,7 @@ Run scheduled jobs.
Not meant for running jobs at a precise time (±1h)
"""
import time
from typing import List, Optional
from typing import List
import arrow
from sqlalchemy.sql.expression import or_, and_
@ -14,9 +14,7 @@ from app.email_utils import (
send_email,
render,
)
from app.events.event_dispatcher import PostgresDispatcher
from app.import_utils import handle_batch_import
from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
@ -199,18 +197,13 @@ def process_job(job: Job):
onboarding_mailbox(user)
elif job.name == config.JOB_ONBOARDING_4:
user_id = job.payload.get("user_id")
user: User = User.get(user_id)
user = User.get(user_id)
# user might delete their account in the meantime
# or disable the notification
if user and user.notification and user.activated:
# if user only has 1 mailbox which is Proton then do not send PGP onboarding email
mailboxes = user.mailboxes()
if len(mailboxes) == 1 and mailboxes[0].is_proton():
LOG.d("Do not send onboarding PGP email to Proton mailbox")
else:
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
elif job.name == config.JOB_BATCH_IMPORT:
batch_import_id = job.payload.get("batch_import_id")
@ -226,44 +219,43 @@ def process_job(job: Job):
user_email = user.email
LOG.w("Delete user %s", user)
User.delete(user.id)
Session.commit()
send_email(
user_email,
"Your SimpleLogin account has been deleted",
render("transactional/account-delete.txt", user=user),
render("transactional/account-delete.html", user=user),
render("transactional/account-delete.txt"),
render("transactional/account-delete.html"),
retries=3,
)
User.delete(user.id)
Session.commit()
elif job.name == config.JOB_DELETE_MAILBOX:
delete_mailbox_job(job)
elif job.name == config.JOB_DELETE_DOMAIN:
custom_domain_id = job.payload.get("custom_domain_id")
custom_domain: Optional[CustomDomain] = CustomDomain.get(custom_domain_id)
custom_domain = CustomDomain.get(custom_domain_id)
if not custom_domain:
return
domain_name = custom_domain.domain
user = custom_domain.user
custom_domain_partner_id = custom_domain.partner_id
CustomDomain.delete(custom_domain.id)
Session.commit()
LOG.d("Domain %s deleted", domain_name)
if custom_domain_partner_id is None:
send_email(
user.email,
f"Your domain {domain_name} has been deleted",
f"""Domain {domain_name} along with its aliases are deleted successfully.
send_email(
user.email,
f"Your domain {domain_name} has been deleted",
f"""Domain {domain_name} along with its aliases are deleted successfully.
Regards,
SimpleLogin team.
""",
retries=3,
)
Regards,
SimpleLogin team.
""",
retries=3,
)
elif job.name == config.JOB_SEND_USER_REPORT:
export_job = ExportUserDataJob.create_from_job(job)
if export_job:
@ -272,16 +264,8 @@ def process_job(job: Job):
user_id = job.payload.get("user_id")
user = User.get(user_id)
if user and user.activated:
LOG.d("Send proton welcome email to user %s", user)
LOG.d("send proton welcome email to user %s", user)
welcome_proton(user)
elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
user_id = job.payload.get("user_id")
user = User.get(user_id)
if user and user.activated:
LOG.d(f"Sending alias creation events for {user}")
send_alias_creation_events_for_user(
user, dispatcher=PostgresDispatcher.get()
)
else:
LOG.e("Unknown job name %s", job.name)

View File

@ -745,6 +745,8 @@ bullish
bullpen
bullring
bullseye
bullwhip
bully
bunch
bundle
bungee
@ -1147,6 +1149,7 @@ coherence
coherent
cohesive
coil
coke
cola
cold
coleslaw
@ -1671,6 +1674,8 @@ delta
deluge
delusion
deluxe
demanding
demeaning
demeanor
demise
democracy
@ -1892,6 +1897,9 @@ divisible
divisibly
division
divisive
divorcee
dizziness
dizzy
doable
docile
dock
@ -1905,6 +1913,7 @@ dole
dollar
dollhouse
dollop
dolly
dolphin
domain
domelike
@ -2018,6 +2027,7 @@ duh
duke
dumping
dumpling
dumpster
duo
dupe
duplex
@ -2026,12 +2036,14 @@ duplicity
durable
durably
duration
duress
during
dusk
dust
dutiful
duty
duvet
dwarf
dweeb
dwelled
dweller
@ -3770,6 +3782,10 @@ makeshift
making
malformed
malt
mama
mammal
mammary
mammogram
manager
managing
manatee
@ -3782,6 +3798,7 @@ mangle
mango
mangy
manhandle
manhole
manhood
manhunt
manicotti
@ -3796,6 +3813,7 @@ manmade
manned
mannish
manor
manpower
mantis
mantra
manual
@ -3832,6 +3850,7 @@ mashed
mashing
massager
masses
massive
mastiff
matador
matchbook
@ -3844,11 +3863,15 @@ maternal
maternity
math
mating
matriarch
matrimony
matrix
matron
matted
matter
maturely
maturing
maturity
mauve
maverick
maximize
@ -3868,6 +3891,9 @@ modify
modular
modulator
module
moisten
moistness
moisture
molar
molasses
mold
@ -3920,7 +3946,11 @@ morality
morally
morbidity
morbidly
morphine
morphing
morse
mortality
mortally
mortician
mortified
mortify
@ -3946,6 +3976,7 @@ mournful
mouse
mousiness
moustache
mousy
mouth
movable
move
@ -3954,6 +3985,7 @@ moving
mower
mowing
much
muck
mud
mug
mulberry
@ -3970,6 +4002,7 @@ mumbling
mumbo
mummified
mummify
mummy
mumps
munchkin
mundane
@ -4765,6 +4798,7 @@ princess
print
prior
prism
prison
prissy
pristine
privacy
@ -4788,6 +4822,8 @@ prodigal
prodigy
produce
product
profane
profanity
professed
professor
profile
@ -5956,6 +5992,10 @@ slit
sliver
slobbery
slogan
sloped
sloping
sloppily
sloppy
slot
slouching
slouchy
@ -5971,6 +6011,7 @@ smartness
smasher
smashing
smashup
smell
smelting
smile
smilingly
@ -5980,6 +6021,11 @@ smith
smitten
smock
smog
smoked
smokeless
smokiness
smoking
smoky
smolder
smooth
smother
@ -6001,6 +6047,7 @@ sneer
sneeze
sneezing
snide
sniff
snippet
snipping
snitch
@ -6156,6 +6203,7 @@ squiggle
squiggly
squint
squire
squirt
squishier
squishy
stability
@ -6275,6 +6323,7 @@ stoning
stony
stood
stooge
stool
stoop
stoplight
stoppable
@ -6409,9 +6458,12 @@ subwoofer
subzero
succulent
such
suction
sudden
sudoku
suds
sufferer
suffering
suffice
suffix
suffocate
@ -6463,6 +6515,7 @@ surplus
surprise
surreal
surrender
surrogate
surround
survey
survival
@ -6475,6 +6528,7 @@ suspend
suspense
sustained
sustainer
swab
swaddling
swagger
swampland
@ -6482,6 +6536,7 @@ swan
swapping
swarm
sway
swear
sweat
sweep
swell
@ -6550,6 +6605,9 @@ talcum
talisman
tall
talon
tamale
tameness
tamer
tamper
tank
tanned
@ -6589,6 +6647,7 @@ thaw
theater
theatrics
thee
theft
theme
theology
theorize
@ -6693,6 +6752,7 @@ trade
trading
tradition
traffic
tragedy
trailing
trailside
train
@ -6712,6 +6772,7 @@ trapped
trapper
trapping
traps
trash
travel
traverse
travesty

View File

@ -1,48 +0,0 @@
"""empty message
Revision ID: 52510a633d6f
Revises: 818b0a956205
Create Date: 2024-03-12 12:46:24.161644
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "52510a633d6f"
down_revision = "818b0a956205"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"alias", sa.Column("flags", sa.BigInteger(), server_default="0", nullable=False)
)
with op.get_context().autocommit_block():
op.create_index(op.f("ix_alias_flags"), "alias", ["flags"], unique=False)
op.create_index(op.f("ix_job_state"), "job", ["state"], unique=False)
op.create_index(
"ix_state_run_at_taken_at",
"job",
["state", "run_at", "taken_at"],
unique=False,
)
op.create_index(
op.f("ix_notification_user_id"), "notification", ["user_id"], unique=False
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index(op.f("ix_notification_user_id"), table_name="notification")
op.drop_index("ix_state_run_at_taken_at", table_name="job")
op.drop_index(op.f("ix_job_state"), table_name="job")
op.drop_index(op.f("ix_alias_flags"), table_name="alias")
op.drop_column("alias", "flags")
# ### end Alembic commands ###

View File

@ -1,29 +0,0 @@
"""empty message
Revision ID: fa2f19bb4e5a
Revises: 52510a633d6f
Create Date: 2024-04-09 13:12:26.305340
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fa2f19bb4e5a'
down_revision = '52510a633d6f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('enable_data_breach_check', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'enable_data_breach_check')
# ### end Alembic commands ###

View File

@ -1,38 +0,0 @@
"""Create sync_event table
Revision ID: 06a9a7133445
Revises: fa2f19bb4e5a
Create Date: 2024-05-17 13:11:20.402259
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '06a9a7133445'
down_revision = 'fa2f19bb4e5a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('sync_event',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.Column('content', sa.LargeBinary(), nullable=False),
sa.Column('taken_time', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_sync_event_created_at'), 'sync_event', ['created_at'], unique=False)
op.create_index(op.f('ix_sync_event_taken_time'), 'sync_event', ['taken_time'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sync_event')
# ### end Alembic commands ###

View File

@ -1,31 +0,0 @@
"""empty message
Revision ID: d608b8e48082
Revises: 06a9a7133445
Create Date: 2024-07-05 16:56:04.220173
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd608b8e48082'
down_revision = '06a9a7133445'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('deleted_alias', sa.Column('reason', sa.Integer(), default=0, server_default='0', nullable=False))
op.add_column('domain_deleted_alias', sa.Column('reason', sa.Integer(), default=0, server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('domain_deleted_alias', 'reason')
op.drop_column('deleted_alias', 'reason')
# ### end Alembic commands ###

View File

@ -1,28 +0,0 @@
"""add retry count to sync event
Revision ID: 56d08955fcab
Revises: d608b8e48082
Create Date: 2024-07-19 08:21:19.979973
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '56d08955fcab'
down_revision = 'd608b8e48082'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('sync_event', sa.Column('retry_count', sa.Integer(), server_default='0', nullable=False, default=0))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('sync_event', 'retry_count')
# ### end Alembic commands ###

View File

@ -1,42 +0,0 @@
"""empty message
Revision ID: 1c14339aae90
Revises: 56d08955fcab
Create Date: 2024-07-30 11:46:32.460221
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1c14339aae90'
down_revision = '56d08955fcab'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('mailbox_activation',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.Column('mailbox_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=32), nullable=False),
sa.Column('tries', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['mailbox_id'], ['mailbox.id'], ondelete='cascade'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_mailbox_activation_code'), 'mailbox_activation', ['code'], unique=False)
op.create_index(op.f('ix_mailbox_activation_mailbox_id'), 'mailbox_activation', ['mailbox_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_mailbox_activation_mailbox_id'), table_name='mailbox_activation')
op.drop_index(op.f('ix_mailbox_activation_code'), table_name='mailbox_activation')
op.drop_table('mailbox_activation')
# ### end Alembic commands ###

View File

@ -1,30 +0,0 @@
"""Custom Domain partner id
Revision ID: 2441b7ff5da9
Revises: 1c14339aae90
Create Date: 2024-09-13 15:43:02.425964
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2441b7ff5da9'
down_revision = '1c14339aae90'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('custom_domain', sa.Column('partner_id', sa.Integer(), nullable=True, default=None, server_default=None))
op.create_foreign_key(None, 'custom_domain', 'partner', ['partner_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'custom_domain', type_='foreignkey')
op.drop_column('custom_domain', 'partner_id')
# ### end Alembic commands ###

View File

@ -1,31 +0,0 @@
"""contact.flags and custom_domain.pending_deletion
Revision ID: 88dd7a0abf54
Revises: 2441b7ff5da9
Create Date: 2024-09-19 15:41:20.910374
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '88dd7a0abf54'
down_revision = '2441b7ff5da9'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('contact', sa.Column('flags', sa.Integer(), server_default='0', nullable=False))
op.add_column('custom_domain', sa.Column('pending_deletion', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('custom_domain', 'pending_deletion')
op.drop_column('contact', 'flags')
# ### end Alembic commands ###

View File

@ -1,27 +0,0 @@
"""custom domain indices
Revision ID: 62afa3a10010
Revises: 88dd7a0abf54
Create Date: 2024-09-30 11:40:04.127791
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '62afa3a10010'
down_revision = '88dd7a0abf54'
branch_labels = None
depends_on = None
def upgrade():
with op.get_context().autocommit_block():
op.create_index('ix_custom_domain_pending_deletion', 'custom_domain', ['pending_deletion'], unique=False, postgresql_concurrently=True)
op.create_index('ix_custom_domain_user_id', 'custom_domain', ['user_id'], unique=False, postgresql_concurrently=True)
def downgrade():
with op.get_context().autocommit_block():
op.drop_index('ix_custom_domain_user_id', table_name='custom_domain', postgresql_concurrently=True)
op.drop_index('ix_custom_domain_pending_deletion', table_name='custom_domain', postgresql_concurrently=True)
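# Note: PostgreSQL runs CREATE/DROP INDEX CONCURRENTLY outside a transaction
# block, while Alembic wraps each migration in a transaction by default;
# wrapping the statements in op.get_context().autocommit_block() is what
# makes the concurrent index operations above possible.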

View File

@ -4,7 +4,6 @@ import subprocess
from time import sleep
from typing import List, Dict
import arrow
import newrelic.agent
from app.db import Session
@ -94,75 +93,11 @@ def log_nb_db_connection():
newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)
@newrelic.agent.background_task()
def log_nb_db_connection_by_app_name():
# get the number of connections to the DB
rows = Session.execute(
"SELECT application_name, count(datid) FROM pg_stat_activity group by application_name"
)
for row in rows:
if row[0].find("sl-") == 0:
LOG.d("number of db connections for app %s = %s", row[0], row[1])
newrelic.agent.record_custom_metric(
f"Custom/nb_db_app_connection/{row[0]}", row[1]
)
@newrelic.agent.background_task()
def log_pending_to_process_events():
r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
events_pending = list(r)[0][0]
LOG.d("number of events pending to process %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_to_process", events_pending
)
@newrelic.agent.background_task()
def log_events_pending_dead_letter():
since = arrow.now().shift(minutes=-10).datetime
r = Session.execute(
"""
SELECT COUNT(*)
FROM sync_event
WHERE (taken_time IS NOT NULL AND taken_time < :since)
OR (taken_time IS NULL AND created_at < :since)
""",
{"since": since},
)
events_pending = list(r)[0][0]
LOG.d("number of events pending dead letter %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_dead_letter", events_pending
)
@newrelic.agent.background_task()
def log_failed_events():
r = Session.execute(
"""
SELECT COUNT(*)
FROM sync_event
WHERE retry_count >= 10;
""",
)
failed_events = list(r)[0][0]
LOG.d("number of failed events %s", failed_events)
newrelic.agent.record_custom_metric("Custom/sync_events_failed", failed_events)
if __name__ == "__main__":
exporter = MetricExporter(get_newrelic_license())
while True:
log_postfix_metrics()
log_nb_db_connection()
log_pending_to_process_events()
log_events_pending_dead_letter()
log_failed_events()
log_nb_db_connection_by_app_name()
Session.close()
exporter.run()

View File

@ -1,49 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Update alias notes and backfill flag"
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 1000
noteSql = "(note = 'Created through Proton' or note = 'Created through partner Proton')"
alias_query = f"UPDATE alias set note = NULL, flags = flags | :flag where id>=:start AND id<:end and {noteSql}"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
rows_done = Session.execute(
alias_query,
{
"start": batch_start,
"end": batch_start + step,
"flag": Alias.FLAG_PARTNER_CREATED,
},
)
updated += rows_done.rowcount
Session.commit()
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")

View File

@ -1,37 +0,0 @@
#!/usr/bin/env python3
import argparse
import random
import time
from sqlalchemy import func
from app import config
from app.models import Alias, Contact
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
max_alias_id: int = Session.query(func.max(Alias.id)).scalar()
start = time.time()
tests = 1000
for i in range(tests):
alias = (
Alias.filter(Alias.id > int(random.random() * max_alias_id))
.order_by(Alias.id.asc())
.limit(1)
.first()
)
contact = Contact.filter_by(alias_id=alias.id).order_by(Contact.id.asc()).first()
mailboxes = alias.mailboxes
user = alias.user
if i % 10 == 0:
print(f"{i} -> {alias.id}")
end = time.time()
time_taken = end - start
print(f"Took {time_taken} -> {time_taken/tests} per test")

View File

@ -1,56 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias, SLDomain
from app.db import Session
parser = argparse.ArgumentParser(
prog="Mark partner created aliases with the PARTNER_CREATED flag",
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Updating aliases from {alias_id_start} to {max_alias_id}")
domains = SLDomain.filter(SLDomain.partner_id.isnot(None)).all()
cond = [f"email like '%{domain.domain}'" for domain in domains]
sql_or_cond = " OR ".join(cond)
sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})"
print(sql)
step = 1000
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
updated += Session.execute(
sql,
{
"start": batch_start,
"end": batch_start + step,
"flag": Alias.FLAG_PARTNER_CREATED,
},
).rowcount
elapsed = time.time() - start_time
time_per_alias = elapsed / (batch_start - alias_id_start + step)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
percent = int(
((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start)
)
print(
f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining"
)
print(f"Updated aliases up to {max_alias_id}")

View File

@ -1,55 +0,0 @@
#!/usr/bin/env python3
import argparse
import time

from sqlalchemy import func

from app.models import Alias, User
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Backfill user flags for partner alias created"
)
parser.add_argument(
    "-s", "--start_user_id", default=0, type=int, help="Initial user_id"
)
parser.add_argument("-e", "--end_user_id", default=0, type=int, help="Last user_id")

args = parser.parse_args()
user_id_start = args.start_user_id
max_user_id = args.end_user_id
if max_user_id == 0:
    max_user_id = Session.query(func.max(User.id)).scalar()

print(f"Checking user {user_id_start} to {max_user_id}")

step = 1000
el_query = "SELECT user_id, count(id) from alias where user_id>=:start AND user_id < :end AND flags & :alias_flag > 0 GROUP BY user_id"
user_update_query = "UPDATE users set flags = flags | :user_flag where id = :user_id"
updated = 0
start_time = time.time()
for batch_start in range(user_id_start, max_user_id, step):
    rows = Session.execute(
        el_query,
        {
            "start": batch_start,
            "end": batch_start + step,
            "alias_flag": Alias.FLAG_PARTNER_CREATED,
        },
    )
    # Every user owning at least one partner-created alias gets the user-level flag
    for row in rows:
        if row[1] > 0:
            Session.execute(
                user_update_query,
                {"user_id": row[0], "user_flag": User.FLAG_CREATED_ALIAS_FROM_PARTNER},
            )
            Session.commit()
            updated += 1
    elapsed = time.time() - start_time
    time_per_alias = elapsed / (updated + 1)
    last_batch_id = batch_start + step
    remaining = max_user_id - last_batch_id
    time_remaining = remaining * time_per_alias
    hours_remaining = time_remaining / 3600.0
    print(
        f"\rUser {batch_start}/{max_user_id} {updated} {hours_remaining:.2f}hrs remaining"
    )
print("")


@ -1,53 +0,0 @@
#!/usr/bin/env python3
import argparse
import time

from app import config
from app.email_utils import generate_reply_email
from app.email_validation import is_valid_email
from app.models import Alias
from app.db import Session

parser = argparse.ArgumentParser(
    prog=f"Replace {config.NOREPLY}",
    description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()

el_query = "SELECT id, alias_id, website_email from contact where id>=:last_id AND reply_email=:reply_email ORDER BY id ASC LIMIT :step"
update_query = "UPDATE contact SET reply_email=:reply_email WHERE id=:contact_id "

updated = 0
start_time = time.time()
step = 100
last_id = 0
print(f"Replacing contacts with reply_email={config.NOREPLY}")
while True:
    rows = Session.execute(
        el_query, {"last_id": last_id, "reply_email": config.NOREPLY, "step": step}
    )
    loop_updated = 0
    for row in rows:
        contact_id = row[0]
        alias_id = row[1]
        last_id = contact_id
        website_email = row[2]
        contact_email_for_reply = website_email if is_valid_email(website_email) else ""
        alias = Alias.get(alias_id)
        if alias is None:
            print(f"CANNOT find alias {alias_id} in database for contact {contact_id}")
            continue  # generate_reply_email needs a valid alias; skip this contact
        reply_email = generate_reply_email(contact_email_for_reply, alias)
        print(
            f"Replacing contact {contact_id} ({website_email}) reply_email with {reply_email}"
        )
        Session.execute(
            update_query, {"contact_id": row[0], "reply_email": reply_email}
        )
        Session.commit()
        updated += 1
        loop_updated += 1
        elapsed = time.time() - start_time
    print(f"\rContact {last_id} done")
    if loop_updated == 0:
        break
print("")

app/poetry.lock (generated, 420 lines changed)

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
[[package]]
name = "aiohttp"
@ -276,6 +276,21 @@ files = [
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
]
[[package]]
name = "backports.entry-points-selectable"
version = "1.1.1"
description = "Compatibility shim providing selectable entry points for older implementations"
optional = false
python-versions = ">=2.7"
files = [
{file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"},
{file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"},
]
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
[[package]]
name = "bcrypt"
version = "3.2.0"
@ -476,13 +491,13 @@ pycparser = "*"
[[package]]
name = "cfgv"
version = "3.4.0"
version = "3.2.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.6.1"
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
{file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"},
{file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"},
]
[[package]]
@ -675,21 +690,6 @@ sdist = ["setuptools-rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
[[package]]
name = "cssbeautifier"
version = "1.15.1"
description = "CSS unobfuscator and beautifier."
optional = false
python-versions = "*"
files = [
{file = "cssbeautifier-1.15.1.tar.gz", hash = "sha256:9f7064362aedd559c55eeecf6b6bed65e05f33488dcbe39044f0403c26e1c006"},
]
[package.dependencies]
editorconfig = ">=0.12.2"
jsbeautifier = "*"
six = ">=1.13.0"
[[package]]
name = "decorator"
version = "4.4.2"
@ -734,40 +734,41 @@ graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "distlib"
version = "0.3.8"
version = "0.3.1"
description = "Distribution utilities"
optional = false
python-versions = "*"
files = [
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
{file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"},
{file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"},
]
[[package]]
name = "djlint"
version = "1.34.1"
version = "1.3.0"
description = "HTML Template Linter and Formatter"
optional = false
python-versions = ">=3.8.0,<4.0.0"
python-versions = ">=3.7,<4.0"
files = [
{file = "djlint-1.34.1-py3-none-any.whl", hash = "sha256:96ff1c464fb6f061130ebc88663a2ea524d7ec51f4b56221a2b3f0320a3cfce8"},
{file = "djlint-1.34.1.tar.gz", hash = "sha256:db93fa008d19eaadb0454edf1704931d14469d48508daba2df9941111f408346"},
{file = "djlint-1.3.0-py3-none-any.whl", hash = "sha256:0c986bf542cdac3025d431a5b15e6c3977f652f2e76e408dbb5e7aaab6b73d99"},
{file = "djlint-1.3.0.tar.gz", hash = "sha256:b2d8e6c0a14f88da165296f0da05795d15299b7ab0a9093d670ce9ffd867bc79"},
]
[package.dependencies]
click = ">=8.0.1,<9.0.0"
colorama = ">=0.4.4,<0.5.0"
cssbeautifier = ">=1.14.4,<2.0.0"
html-tag-names = ">=0.1.2,<0.2.0"
html-void-elements = ">=0.1.0,<0.2.0"
jsbeautifier = ">=1.14.4,<2.0.0"
json5 = ">=0.9.11,<0.10.0"
pathspec = ">=0.12.0,<0.13.0"
importlib-metadata = ">=4.11.0,<5.0.0"
pathspec = ">=0.9.0,<0.10.0"
PyYAML = ">=6.0,<7.0"
regex = ">=2023.0.0,<2024.0.0"
regex = ">=2022.1.18,<2023.0.0"
tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""}
tqdm = ">=4.62.2,<5.0.0"
[package.extras]
test = ["coverage (>=6.3.1,<7.0.0)", "pytest (>=7.0.1,<8.0.0)", "pytest-cov (>=3.0.0,<4.0.0)"]
[[package]]
name = "dkimpy"
version = "1.0.5"
@ -805,16 +806,6 @@ doh = ["requests", "requests-toolbelt"]
idna = ["idna (>=2.1)"]
trio = ["sniffio (>=1.1)", "trio (>=0.14.0)"]
[[package]]
name = "editorconfig"
version = "0.12.4"
description = "EditorConfig File Locator and Interpreter for Python"
optional = false
python-versions = "*"
files = [
{file = "EditorConfig-0.12.4.tar.gz", hash = "sha256:24857fa1793917dd9ccf0c7810a07e05404ce9b823521c7dce22a4fb5d125f80"},
]
[[package]]
name = "email-validator"
version = "1.1.3"
@ -860,20 +851,15 @@ requests = "*"
[[package]]
name = "filelock"
version = "3.15.4"
version = "3.0.12"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
python-versions = "*"
files = [
{file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
{file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
{file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"},
{file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "flanker"
version = "0.9.11"
@ -1372,6 +1358,7 @@ files = [
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@ -1380,6 +1367,7 @@ files = [
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@ -1409,6 +1397,7 @@ files = [
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@ -1417,6 +1406,7 @@ files = [
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@ -1505,17 +1495,17 @@ pyreadline = {version = "*", markers = "sys_platform == \"win32\""}
[[package]]
name = "identify"
version = "2.6.0"
version = "1.5.5"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
files = [
{file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"},
{file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"},
{file = "identify-1.5.5-py2.py3-none-any.whl", hash = "sha256:da683bfb7669fa749fc7731f378229e2dbf29a1d1337cbde04106f02236eb29d"},
{file = "identify-1.5.5.tar.gz", hash = "sha256:7c22c384a2c9b32c5cc891d13f923f6b2653aa83e2d75d8f79be240d6c86c4f4"},
]
[package.extras]
license = ["ukkonen"]
license = ["editdistance"]
[[package]]
name = "idna"
@ -1528,6 +1518,25 @@ files = [
{file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
]
[[package]]
name = "importlib-metadata"
version = "4.12.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
{file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
[[package]]
name = "iniconfig"
version = "1.0.1"
@ -1660,31 +1669,6 @@ files = [
{file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"},
]
[[package]]
name = "jsbeautifier"
version = "1.15.1"
description = "JavaScript unobfuscator and beautifier."
optional = false
python-versions = "*"
files = [
{file = "jsbeautifier-1.15.1.tar.gz", hash = "sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24"},
]
[package.dependencies]
editorconfig = ">=0.12.2"
six = ">=1.13.0"
[[package]]
name = "json5"
version = "0.9.25"
description = "A Python implementation of the JSON5 data format."
optional = false
python-versions = ">=3.8"
files = [
{file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"},
{file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"},
]
[[package]]
name = "jwcrypto"
version = "0.8"
@ -1975,13 +1959,13 @@ urllib3 = ">=1.7,<2"
[[package]]
name = "nodeenv"
version = "1.9.1"
version = "1.5.0"
description = "Node.js virtual environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
python-versions = "*"
files = [
{file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
{file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"},
{file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"},
]
[[package]]
@ -2031,13 +2015,13 @@ testing = ["docopt", "pytest (>=3.0.7)"]
[[package]]
name = "pathspec"
version = "0.12.1"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
[[package]]
@ -2133,13 +2117,13 @@ files = [
[[package]]
name = "pre-commit"
version = "3.8.0"
version = "2.17.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.6.1"
files = [
{file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"},
{file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"},
{file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"},
{file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"},
]
[package.dependencies]
@ -2147,7 +2131,8 @@ cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
toml = "*"
virtualenv = ">=20.0.8"
[[package]]
name = "prompt-toolkit"
@ -2165,22 +2150,24 @@ wcwidth = "*"
[[package]]
name = "protobuf"
version = "5.27.1"
version = "4.24.3"
description = ""
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"},
{file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"},
{file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"},
{file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"},
{file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"},
{file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"},
{file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"},
{file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"},
{file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"},
{file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"},
{file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"},
{file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
{file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
{file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
{file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
{file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
{file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
{file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
{file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
{file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
{file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
{file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
{file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
{file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
]
[[package]]
@ -2680,104 +2667,85 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"
[[package]]
name = "regex"
version = "2023.12.25"
version = "2022.6.2"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.6"
files = [
{file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
{file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
{file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
{file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
{file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
{file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
{file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
{file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
{file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
{file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
{file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
{file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
{file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
{file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
{file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
{file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
{file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
{file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
{file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
{file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
{file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
{file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
{file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
{file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
{file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
{file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
{file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
{file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
{file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
{file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
{file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
{file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
{file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
{file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
{file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
{file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
{file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
{file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
{file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
{file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
{file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
{file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
{file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
{file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
{file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
{file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
{file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
{file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
{file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
{file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
{file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
{file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
{file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
{file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
{file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
{file = "regex-2022.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:042d122f9fee3ceb6d7e3067d56557df697d1aad4ff5f64ecce4dc13a90a7c01"},
{file = "regex-2022.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffef4b30785dc2d1604dfb7cf9fca5dc27cd86d65f7c2a9ec34d6d3ae4565ec2"},
{file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0afa6a601acf3c0dc6de4e8d7d8bbce4e82f8542df746226cd35d4a6c15e9456"},
{file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a11cbe8eb5fb332ae474895b5ead99392a4ea568bd2a258ab8df883e9c2bf92"},
{file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c1f62ee2ba880e221bc950651a1a4b0176083d70a066c83a50ef0cb9b178e12"},
{file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aba3d13c77173e9bfed2c2cea7fc319f11c89a36fcec08755e8fb169cf3b0df"},
{file = "regex-2022.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249437f7f5b233792234aeeecb14b0aab1566280de42dfc97c26e6f718297d68"},
{file = "regex-2022.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:179410c79fa86ef318d58ace233f95b87b05a1db6dc493fa29404a43f4b215e2"},
{file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5e201b1232d81ca1a7a22ab2f08e1eccad4e111579fd7f3bbf60b21ef4a16cea"},
{file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fdecb225d0f1d50d4b26ac423e0032e76d46a788b83b4e299a520717a47d968c"},
{file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:be57f9c7b0b423c66c266a26ad143b2c5514997c05dd32ce7ca95c8b209c2288"},
{file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ed657a07d8a47ef447224ea00478f1c7095065dfe70a89e7280e5f50a5725131"},
{file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:24908aefed23dd065b4a668c0b4ca04d56b7f09d8c8e89636cf6c24e64e67a1e"},
{file = "regex-2022.6.2-cp310-cp310-win32.whl", hash = "sha256:775694cd0bb2c4accf2f1cdd007381b33ec8b59842736fe61bdbad45f2ac7427"},
{file = "regex-2022.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:809bbbbbcf8258049b031d80932ba71627d2274029386f0452e9950bcfa2c6e8"},
{file = "regex-2022.6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2b5d983eb0adf2049d41f95205bdc3de4e6cc2350e9c80d4409d3a75229de"},
{file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4c101746a8dac0401abefa716b357c546e61ea2e3d4a564a9db9eac57ccbce"},
{file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:166ae7674d0a0e0f8044e7335ba86d0716c9d49465cff1b153f908e0470b8300"},
{file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5eac5d8a8ac9ccf00805d02a968a36f5c967db6c7d2b747ab9ed782b3b3a28b"},
{file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57823f35b18d82b201c1b27ce4e55f88e79e81d9ca07b50ce625d33823e1439"},
{file = "regex-2022.6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d42e3b7b23473729adbf76103e7df75f9167a5a80b1257ca30688352b4bb2dc"},
{file = "regex-2022.6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2932e728bee0a634fe55ee54d598054a5a9ffe4cd2be21ba2b4b8e5f8064c2c"},
{file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:17764683ea01c2b8f103d99ae9de2473a74340df13ce306c49a721f0b1f0eb9e"},
{file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:2ac29b834100d2c171085ceba0d4a1e7046c434ddffc1434dbc7f9d59af1e945"},
{file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:f43522fb5d676c99282ca4e2d41e8e2388427c0cf703db6b4a66e49b10b699a8"},
{file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:9faa01818dad9111dbf2af26c6e3c45140ccbd1192c3a0981f196255bf7ec5e6"},
{file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:17443f99b8f255273731f915fdbfea4d78d809bb9c3aaf67b889039825d06515"},
{file = "regex-2022.6.2-cp36-cp36m-win32.whl", hash = "sha256:4a5449adef907919d4ce7a1eab2e27d0211d1b255bf0b8f5dd330ad8707e0fc3"},
{file = "regex-2022.6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4d206703a96a39763b5b45cf42645776f5553768ea7f3c2c1a39a4f59cafd4ba"},
{file = "regex-2022.6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fcd7c432202bcb8b642c3f43d5bcafc5930d82fe5b2bf2c008162df258445c1d"},
{file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:186c5a4a4c40621f64d771038ede20fca6c61a9faa8178f9e305aaa0c2442a97"},
{file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:047b2d1323a51190c01b6604f49fe09682a5c85d3c1b2c8b67c1cd68419ce3c4"},
{file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30637e7fa4acfed444525b1ab9683f714be617862820578c9fd4e944d4d9ad1f"},
{file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adafe6f2c6d86dbf3313866b61180530ca4dcd0c264932dc8fa1ffb10871d58"},
{file = "regex-2022.6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67ae3601edf86e15ebe40885e5bfdd6002d34879070be15cf18fc0d80ea24fed"},
{file = "regex-2022.6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:48dddddce0ea7e7c3e92c1e0c5a28c13ca4dc9cf7e996c706d00479652bff76c"},
{file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:68e5c641645351eb9eb12c465876e76b53717f99e9b92aea7a2dd645a87aa7aa"},
{file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8fd5f8ae42f789538bb634bdfd69b9aa357e76fdfd7ad720f32f8994c0d84f1e"},
{file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:71988a76fcb68cc091e901fddbcac0f9ad9a475da222c47d3cf8db0876cb5344"},
{file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:4b8838f70be3ce9e706df9d72f88a0aa7d4c1fea61488e06fdf292ccb70ad2be"},
{file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:663dca677bd3d2e2b5b7d0329e9f24247e6f38f3b740dd9a778a8ef41a76af41"},
{file = "regex-2022.6.2-cp37-cp37m-win32.whl", hash = "sha256:24963f0b13cc63db336d8da2a533986419890d128c551baacd934c249d51a779"},
{file = "regex-2022.6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ceff75127f828dfe7ceb17b94113ec2df4df274c4cd5533bb299cb099a18a8ca"},
{file = "regex-2022.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a6f2698cfa8340dfe4c0597782776b393ba2274fe4c079900c7c74f68752705"},
{file = "regex-2022.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8a08ace913c4101f0dc0be605c108a3761842efd5f41a3005565ee5d169fb2b"},
{file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26dbe90b724efef7820c3cf4a0e5be7f130149f3d2762782e4e8ac2aea284a0b"},
{file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5f759a1726b995dc896e86f17f9c0582b54eb4ead00ed5ef0b5b22260eaf2d0"},
{file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fc26bb3415e7aa7495c000a2c13bf08ce037775db98c1a3fac9ff04478b6930"},
{file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52684da32d9003367dc1a1c07e059b9bbaf135ad0764cd47d8ac3dba2df109bc"},
{file = "regex-2022.6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c1264eb40a71cf2bff43d6694ab7254438ca19ef330175060262b3c8dd3931a"},
{file = "regex-2022.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bc635ab319c9b515236bdf327530acda99be995f9d3b9f148ab1f60b2431e970"},
{file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:27624b490b5d8880f25dac67e1e2ea93dfef5300b98c6755f585799230d6c746"},
{file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:555f7596fd1f123f8c3a67974c01d6ef80b9769e04d660d6c1a7cc3e6cff7069"},
{file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:933e72fbe1829cbd59da2bc51ccd73d73162f087f88521a87a8ec9cb0cf10fa8"},
{file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cff5c87e941292c97d11dc81bd20679f56a2830f0f0e32f75b8ed6e0eb40f704"},
{file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c757f3a27b6345de13ef3ca956aa805d7734ce68023e84d0fc74e1f09ce66f7a"},
{file = "regex-2022.6.2-cp38-cp38-win32.whl", hash = "sha256:a58d21dd1a2d6b50ed091554ff85e448fce3fe33a4db8b55d0eba2ca957ed626"},
{file = "regex-2022.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:495a4165172848503303ed05c9d0409428f789acc27050fe2cf0a4549188a7d5"},
{file = "regex-2022.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1ab5cf7d09515548044e69d3a0ec77c63d7b9dfff4afc19653f638b992573126"},
{file = "regex-2022.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c1ea28f0ee6cbe4c0367c939b015d915aa9875f6e061ba1cf0796ca9a3010570"},
{file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de1ecf26ce85521bf73897828b6d0687cc6cf271fb6ff32ac63d26b21f5e764"},
{file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7c7044aabdad2329974be2246babcc21d3ede852b3971a90fd8c2056c20360"},
{file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53d69d77e9cfe468b000314dd656be85bb9e96de088a64f75fe128dfe1bf30dd"},
{file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8d61883a38b1289fba9944a19a361875b5c0170b83cdcc95ea180247c1b7d3"},
{file = "regex-2022.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5429202bef174a3760690d912e3a80060b323199a61cef6c6c29b30ce09fd17"},
{file = "regex-2022.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e85b10280cf1e334a7c95629f6cbbfe30b815a4ea5f1e28d31f79eb92c2c3d93"},
{file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c400dfed4137f32127ea4063447006d7153c974c680bf0fb1b724cce9f8567fc"},
{file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f648037c503985aed39f85088acab6f1eb6a0482d7c6c665a5712c9ad9eaefc"},
{file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e7b2ff451f6c305b516281ec45425dd423223c8063218c5310d6f72a0a7a517c"},
{file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:be456b4313a86be41706319c397c09d9fdd2e5cdfde208292a277b867e99e3d1"},
{file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c3db393b21b53d7e1d3f881b64c29d886cbfdd3df007e31de68b329edbab7d02"},
{file = "regex-2022.6.2-cp39-cp39-win32.whl", hash = "sha256:d70596f20a03cb5f935d6e4aad9170a490d88fc4633679bf00c652e9def4619e"},
{file = "regex-2022.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:3b9b6289e03dbe6a6096880d8ac166cb23c38b4896ad235edee789d4e8697152"},
{file = "regex-2022.6.2.tar.gz", hash = "sha256:f7b43acb2c46fb2cd506965b2d9cf4c5e64c9c612bac26c1187933c7296bf08c"},
]
[[package]]
@ -3164,6 +3132,17 @@ idna = "*"
requests = ">=2.1.0"
requests-file = ">=1.4"
[[package]]
name = "toml"
version = "0.10.1"
description = "Python Library for Tom's Obvious, Minimal Language"
optional = false
python-versions = "*"
files = [
{file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"},
{file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"},
]
[[package]]
name = "tomli"
version = "2.0.1"
@ -3311,23 +3290,25 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
version = "20.21.1"
version = "20.8.1"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
files = [
{file = "virtualenv-20.21.1-py3-none-any.whl", hash = "sha256:09ddbe1af0c8ed2bb4d6ed226b9e6415718ad18aef9fa0ba023d96b7a8356049"},
{file = "virtualenv-20.21.1.tar.gz", hash = "sha256:4c104ccde994f8b108163cf9ba58f3d11511d9403de87fb9b4f52bf33dbc8668"},
{file = "virtualenv-20.8.1-py2.py3-none-any.whl", hash = "sha256:10062e34c204b5e4ec5f62e6ef2473f8ba76513a9a617e873f1f8fb4a519d300"},
{file = "virtualenv-20.8.1.tar.gz", hash = "sha256:bcc17f0b3a29670dd777d6f0755a4c04f28815395bca279cdcb213b97199a6b8"},
]
[package.dependencies]
distlib = ">=0.3.6,<1"
filelock = ">=3.4.1,<4"
platformdirs = ">=2.4,<4"
"backports.entry-points-selectable" = ">=1.0.4"
distlib = ">=0.3.1,<1"
filelock = ">=3.0.0,<4"
platformdirs = ">=2,<3"
six = ">=1.9.0,<2"
[package.extras]
docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]
docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"]
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"]
[[package]]
name = "watchtower"
@ -3626,6 +3607,21 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"
[[package]]
name = "zipp"
version = "3.2.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.6"
files = [
{file = "zipp-3.2.0-py3-none-any.whl", hash = "sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6"},
{file = "zipp-3.2.0.tar.gz", hash = "sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f"},
]
[package.extras]
docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["func-timeout", "jaraco.itertools", "jaraco.test (>=3.2.0)", "pytest (>=3.5,!=3.7.3)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8", "pytest-mypy"]
[[package]]
name = "zope.event"
version = "4.5.0"
@ -3704,4 +3700,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "22b9a61e9999a215aacb889b3790ee1a6840ce249aea2e3d16c6113243d5c126"
content-hash = "01afc410d21eeac0a0ac7e8ef6eeb0a991cf4bc091c3351049263462e205ff63"


@ -1,52 +0,0 @@
syntax = "proto3";
package simplelogin_events;
message UserPlanChanged {
uint32 plan_end_time = 1;
}
message UserDeleted {
}
message AliasCreated {
uint32 id = 1;
string email = 2;
string note = 3;
bool enabled = 4;
uint32 created_at = 5;
}
message AliasStatusChanged {
uint32 id = 1;
string email = 2;
bool enabled = 3;
uint32 created_at = 4;
}
message AliasDeleted {
uint32 id = 1;
string email = 2;
}
message AliasCreatedList {
repeated AliasCreated events = 1;
}
message EventContent {
oneof content {
UserPlanChanged user_plan_change = 1;
UserDeleted user_deleted = 2;
AliasCreated alias_created = 3;
AliasStatusChanged alias_status_change = 4;
AliasDeleted alias_deleted = 5;
AliasCreatedList alias_create_list = 6;
}
}
message Event {
uint32 user_id = 1;
string external_user_id = 2;
uint32 partner_id = 3;
EventContent content = 4;
}
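These messages are compiled to Python by the generation script at the end of this diff (protoc --python_out into app/events/generated). A minimal usage sketch, assuming the generated module is importable as app.events.generated.event_pb2:

from app.events.generated import event_pb2

# Build an Event whose oneof content carries an AliasCreated payload
# (all values below are illustrative).
event = event_pb2.Event(
    user_id=42,
    external_user_id="ext-123",
    partner_id=1,
    content=event_pb2.EventContent(
        alias_created=event_pb2.AliasCreated(
            id=7,
            email="hello@example.com",
            note="",
            enabled=True,
            created_at=1700000000,
        )
    ),
)

payload = event.SerializeToString()  # wire bytes, ready to publish
decoded = event_pb2.Event()
decoded.ParseFromString(payload)
assert decoded.content.WhichOneof("content") == "alias_created"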


@ -14,14 +14,13 @@ exclude = '''
| build
| dist
| migrations # migrations/ is generated by alembic
| app/events/generated
)/
)
'''
[tool.ruff]
ignore-init-module-imports = true
exclude = [".venv", "migrations", "app/events/generated"]
exclude = [".venv", "migrations"]
[tool.djlint]
indent = 2
@ -121,13 +120,13 @@ aiospamc = "0.10"
[tool.poetry.dev-dependencies]
pytest = "^7.0.0"
pytest-cov = "^3.0.0"
pre-commit = "^2.17.0"
black = "^22.1.0"
djlint = "^1.3.0"
pylint = "^2.14.4"
[tool.poetry.group.dev.dependencies]
ruff = "^0.1.5"
pre-commit = "^3.8.0"
[build-system]
requires = ["poetry>=0.12"]


@ -1,24 +0,0 @@
#!/bin/bash

set -euxo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" || exit 1; pwd -P)"
REPO_ROOT=$(echo "${SCRIPT_DIR}" | sed 's:scripts::g')
DEST_DIR="${REPO_ROOT}/app/events/generated"

PROTOC=${PROTOC:-"protoc"}

if ! eval "${PROTOC} --version" &> /dev/null ; then
    echo "Cannot find $PROTOC"
    exit 1
fi

# Regenerate the bindings from scratch so deleted messages do not linger
rm -rf "${DEST_DIR}"
mkdir -p "${DEST_DIR}"

pushd "${REPO_ROOT}" || exit 1  # quoted so paths with spaces survive word splitting
eval "${PROTOC} --proto_path=proto --python_out=\"${DEST_DIR}\" --pyi_out=\"${DEST_DIR}\" proto/event.proto"
popd || exit 1

Some files were not shown because too many files have changed in this diff.