Compare commits


20 Commits

SHA1 Message Date
f1110506c0 4.39.3 2024-02-27 12:00:07 +00:00
f5bce7d7ff 4.39.2 2024-02-23 12:00:07 +00:00
75f45d9365 4.39.1 2024-02-20 12:00:07 +00:00
ead425e0c2 4.38.3 2024-02-14 12:00:07 +00:00
6c910d62c5 4.38.2 2024-02-06 12:00:07 +00:00
99ffd1ec0c 4.38.0 2024-02-03 16:55:23 +00:00
eda940f8b2 4.37.2 2024-01-27 12:00:07 +00:00
1dad582523 4.37.1 2024-01-25 12:00:08 +00:00
e516266a27 4.37.0 2024-01-18 12:00:07 +00:00
850fc95477 4.36.8 2023-12-28 12:00:07 +00:00
d172825900 4.36.7 2023-12-21 12:00:09 +00:00
026865e5bf 4.36.6 2023-12-17 14:56:57 +00:00
add94ef2a2 4.36.5 2023-11-30 12:00:09 +00:00
1081400948 4.36.4 2023-11-22 12:00:09 +00:00
5776128905 4.36.3 2023-11-08 12:00:06 +00:00
d661860f4c 4.35.6 2023-11-07 12:00:06 +00:00
0a52e32972 4.35.3 2023-10-05 12:00:06 +01:00
703dcbd0eb 4.35.2 2023-10-03 12:00:06 +01:00
ce7ed69547 4.35.1 2023-10-02 12:00:06 +01:00
4f5564df16 4.35.0 2023-09-29 12:00:06 +01:00
117 changed files with 1495 additions and 370902 deletions


@@ -15,9 +15,15 @@ jobs:
      - uses: actions/setup-python@v4
        with:
-          python-version: '3.9'
+          python-version: '3.10'
          cache: 'poetry'
+      - name: Install OS dependencies
+        if: ${{ matrix.python-version }} == '3.10'
+        run: |
+          sudo apt update
+          sudo apt install -y libre2-dev libpq-dev
      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction


@ -7,18 +7,19 @@ repos:
hooks: hooks:
- id: check-yaml - id: check-yaml
- id: trailing-whitespace - id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
- repo: https://github.com/Riverside-Healthcare/djLint - repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.3.0 rev: v1.3.0
hooks: hooks:
- id: djlint-jinja - id: djlint-jinja
files: '.*\.html' files: '.*\.html'
entry: djlint --reformat entry: djlint --reformat
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.5
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format


@@ -23,7 +23,7 @@ COPY poetry.lock pyproject.toml ./
# Install and setup poetry
RUN pip install -U pip \
    && apt-get update \
-    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev \
+    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build \
    && curl -sSL https://install.python-poetry.org | python3 - \
    # Remove curl and netcat from the image
    && apt-get purge -y curl netcat-traditional \
@@ -31,7 +31,7 @@ RUN pip install -U pip \
    && poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root \
    # Clear apt cache \
-    && apt-get purge -y libre2-dev \
+    && apt-get purge -y libre2-dev cmake ninja-build \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*


@@ -74,7 +74,7 @@ Setting up DKIM is highly recommended to reduce the chance your emails ending up
First you need to generate a private and public key for DKIM:
```bash
-openssl genrsa -out dkim.key 1024
+openssl genrsa -out dkim.key -traditional 1024
openssl rsa -in dkim.key -pubout -out dkim.pub.key
```
@@ -511,10 +511,13 @@ server {
    location / {
        proxy_pass http://localhost:7777;
+        proxy_set_header Host $host;
    }
}
```
+
+Note: If `/etc/nginx/sites-enabled/default` exists, delete it or certbot will fail due to the conflict. The `simplelogin` file should be the only file in `sites-enabled`.
+
Reload Nginx with the command below
```bash


@@ -5,10 +5,11 @@ from typing import Optional
from arrow import Arrow
from newrelic import agent
+from sqlalchemy import or_
from app.db import Session
from app.email_utils import send_welcome_email
-from app.utils import sanitize_email
+from app.utils import sanitize_email, canonicalize_email
from app.errors import (
    AccountAlreadyLinkedToAnotherPartnerException,
    AccountIsUsingAliasAsEmail,
@@ -131,8 +132,9 @@ class ClientMergeStrategy(ABC):
class NewUserStrategy(ClientMergeStrategy):
    def process(self) -> LinkResult:
        # Will create a new SL User with a random password
+        canonical_email = canonicalize_email(self.link_request.email)
        new_user = User.create(
-            email=self.link_request.email,
+            email=canonical_email,
            name=self.link_request.name,
            password=random_string(20),
            activated=True,
@@ -166,7 +168,8 @@ class NewUserStrategy(ClientMergeStrategy):
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
    def process(self) -> LinkResult:
+        # IF it was scheduled to be deleted. Unschedule it.
+        self.user.delete_on = None
        partner_user = ensure_partner_user_exists_for_user(
            self.link_request, self.user, self.partner
        )
@@ -213,11 +216,21 @@ def process_login_case(
        partner_id=partner.id, external_user_id=link_request.external_user_id
    )
    if partner_user is None:
+        canonical_email = canonicalize_email(link_request.email)
        # We didn't find any SimpleLogin user registered with that partner user id
        # Make sure they aren't using an alias as their link email
        check_alias(link_request.email)
+        check_alias(canonical_email)
        # Try to find it using the partner's e-mail address
-        user = User.get_by(email=link_request.email)
+        users = User.filter(
+            or_(User.email == link_request.email, User.email == canonical_email)
+        ).all()
+        if len(users) > 1:
+            user = [user for user in users if user.email == canonical_email][0]
+        elif len(users) == 1:
+            user = users[0]
+        else:
+            user = None
        return get_login_strategy(link_request, user, partner).process()
    else:
        # We found the SL user registered with that partner user id
@@ -235,6 +248,8 @@ def link_user(
) -> LinkResult:
    # Sanitize email just in case
    link_request.email = sanitize_email(link_request.email)
+    # If it was scheduled to be deleted. Unschedule it.
+    current_user.delete_on = None
    partner_user = ensure_partner_user_exists_for_user(
        link_request, current_user, partner
    )
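The lookup above prefers the account stored under the canonical address when both a raw and a canonical match exist. A minimal standalone sketch of that selection rule; the canonicalize helper below is a simplified stand-in, not the real app.utils.canonicalize_email:

```python
from typing import Optional


def canonicalize(email: str) -> str:
    # Simplified stand-in: lower-case, and strip dots/"+tag" for gmail-style domains.
    local, _, domain = email.lower().partition("@")
    if domain in ("gmail.com", "googlemail.com"):
        local = local.split("+")[0].replace(".", "")
    return f"{local}@{domain}"


def pick_user(matching_emails: list[str], link_email: str) -> Optional[str]:
    # Mirrors process_login_case: with several matches, keep the canonical one.
    canonical = canonicalize(link_email)
    if len(matching_emails) > 1:
        return [e for e in matching_emails if e == canonical][0]
    return matching_emails[0] if matching_emails else None


print(pick_user(["john.doe+sl@gmail.com", "johndoe@gmail.com"], "John.Doe+sl@gmail.com"))
# -> johndoe@gmail.com
```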


@@ -214,6 +214,20 @@ class UserAdmin(SLModelView):
        Session.commit()

+    @action(
+        "remove trial",
+        "Stop trial period",
+        "Remove trial for this user?",
+    )
+    def stop_trial(self, ids):
+        for user in User.filter(User.id.in_(ids)):
+            user.trial_end = None
+            flash(f"Stopped trial for {user}", "success")
+            AdminAuditLog.stop_trial(current_user.id, user.id)
+        Session.commit()
+
    @action(
        "disable_otp_fido",
        "Disable OTP & FIDO",
@@ -256,6 +270,17 @@ class UserAdmin(SLModelView):
        Session.commit()

+    @action(
+        "clear_delete_on",
+        "Remove scheduled deletion of user",
+        "This will remove the scheduled deletion for this users",
+    )
+    def clean_delete_on(self, ids):
+        for user in User.filter(User.id.in_(ids)):
+            user.delete_on = None
+        Session.commit()
+
    # @action(
    #     "login_as",
    #     "Login as this user",
@@ -600,6 +625,26 @@ class NewsletterAdmin(SLModelView):
        else:
            flash(error_msg, "error")

+    @action(
+        "clone_newsletter",
+        "Clone this newsletter",
+    )
+    def clone_newsletter(self, newsletter_ids):
+        if len(newsletter_ids) != 1:
+            flash("you can only select 1 newsletter", "error")
+            return
+        newsletter_id = newsletter_ids[0]
+        newsletter: Newsletter = Newsletter.get(newsletter_id)
+        new_newsletter = Newsletter.create(
+            subject=newsletter.subject,
+            html=newsletter.html,
+            plain_text=newsletter.plain_text,
+            commit=True,
+        )
+        flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")
+
class NewsletterUserAdmin(SLModelView):
    column_searchable_list = ["id"]


@@ -70,7 +70,6 @@ def verify_prefix_suffix(
        # when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
        and not config.DISABLE_ALIAS_SUFFIX
    ):
        if not alias_domain_prefix.startswith("."):
            LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
            return False


@@ -21,6 +21,8 @@ from app.email_utils import (
    send_cannot_create_directory_alias_disabled,
    get_email_local_part,
    send_cannot_create_domain_alias,
+    send_email,
+    render,
)
from app.errors import AliasInTrashError
from app.log import LOG
@@ -36,6 +38,8 @@ from app.models import (
    EmailLog,
    Contact,
    AutoCreateRule,
+    AliasUsedOn,
+    ClientUser,
)
from app.regex_utils import regex_match
@@ -399,3 +403,58 @@ def alias_export_csv(user, csv_direct_export=False):
    output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
    output.headers["Content-type"] = "text/csv"
    return output
+
+
+def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
+    # cannot transfer alias which is used for receiving newsletter
+    if User.get_by(newsletter_alias_id=alias.id):
+        raise Exception("Cannot transfer alias that's used to receive newsletter")
+
+    # update user_id
+    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
+        {"user_id": new_user.id}
+    )
+    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
+        {"user_id": new_user.id}
+    )
+    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
+        {"user_id": new_user.id}
+    )
+
+    # remove existing mailboxes from the alias
+    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
+
+    # set mailboxes
+    alias.mailbox_id = new_mailboxes.pop().id
+    for mb in new_mailboxes:
+        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
+
+    # alias has never been transferred before
+    if not alias.original_owner_id:
+        alias.original_owner_id = alias.user_id
+
+    # inform previous owner
+    old_user = alias.user
+    send_email(
+        old_user.email,
+        f"Alias {alias.email} has been received",
+        render(
+            "transactional/alias-transferred.txt",
+            alias=alias,
+        ),
+        render(
+            "transactional/alias-transferred.html",
+            alias=alias,
+        ),
+    )
+
+    # now the alias belongs to the new user
+    alias.user_id = new_user.id
+
+    # set some fields back to default
+    alias.disable_pgp = False
+    alias.pinned = False
+
+    Session.commit()
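One subtlety in transfer_alias above: it pops from the mailbox list it receives, and the popped entry becomes the alias's primary mailbox. A hedged usage sketch; the ids and lookups are illustrative only, not from the diff:

```python
from app.alias_utils import transfer_alias
from app.models import Alias, Mailbox, User

# Illustrative ids; in the real flow these come from the transfer route.
alias = Alias.get(42)
new_owner = User.get(7)
new_mailboxes = Mailbox.filter_by(user_id=new_owner.id).all()

# Pass a copy: the function mutates the list via pop(), the popped mailbox
# becomes alias.mailbox_id, and the rest become AliasMailbox rows.
transfer_alias(alias, new_owner, list(new_mailboxes))
```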


@@ -16,3 +16,22 @@ from .views import (
    sudo,
    user,
)
+
+__all__ = [
+    "alias_options",
+    "new_custom_alias",
+    "custom_domain",
+    "new_random_alias",
+    "user_info",
+    "auth",
+    "auth_mfa",
+    "alias",
+    "apple",
+    "mailbox",
+    "notification",
+    "setting",
+    "export",
+    "phone",
+    "sudo",
+    "user",
+]


@@ -33,6 +33,9 @@ def authorize_request() -> Optional[Tuple[str, int]]:
    if g.user.disabled:
        return jsonify(error="Disabled account"), 403

+    if not g.user.is_active():
+        return jsonify(error="Account does not exist"), 401
+
    g.api_key = api_key
    return None


@@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
    q = q.order_by(Alias.pinned.desc())
    q = q.order_by(latest_activity.desc())

-    q = list(q.limit(page_limit).offset(page_id * page_size))
+    q = q.limit(page_limit).offset(page_id * page_size)

    ret = []
-    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
+    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
        ret.append(
            AliasInfo(
                alias=alias,
@@ -358,7 +358,6 @@ def construct_alias_query(user: User):
                else_=0,
            )
        ).label("nb_forward"),
-        func.max(EmailLog.created_at).label("latest_email_log_created_at"),
    )
    .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
    .filter(Alias.user_id == user.id)
@@ -366,14 +365,6 @@ def construct_alias_query(user: User):
        .subquery()
    )

-    alias_contact_subquery = (
-        Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .filter(Alias.user_id == user.id)
-        .group_by(Alias.id)
-        .subquery()
-    )
-
    return (
        Session.query(
            Alias,
@@ -385,23 +376,7 @@ def construct_alias_query(user: User):
        )
        .options(joinedload(Alias.hibp_breaches))
        .options(joinedload(Alias.custom_domain))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
+        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
+        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
        .filter(Alias.id == alias_activity_subquery.c.id)
-        .filter(Alias.id == alias_contact_subquery.c.id)
-        .filter(
-            or_(
-                EmailLog.created_at
-                == alias_activity_subquery.c.latest_email_log_created_at,
-                and_(
-                    # no email log yet for this alias
-                    alias_activity_subquery.c.latest_email_log_created_at.is_(None),
-                    # to make sure only 1 contact is returned in this case
-                    or_(
-                        Contact.id == alias_contact_subquery.c.max_contact_id,
-                        alias_contact_subquery.c.max_contact_id.is_(None),
-                    ),
-                ),
-            )
-        )
    )


@@ -24,12 +24,14 @@ from app.errors import (
    ErrContactAlreadyExists,
    ErrAddressInvalid,
)
+from app.extensions import limiter
from app.models import Alias, Contact, Mailbox, AliasMailbox

@deprecated
@api_bp.route("/aliases", methods=["GET", "POST"])
@require_api_auth
+@limiter.limit("10/minute", key_func=lambda: g.user.id)
def get_aliases():
    """
    Get aliases
@@ -72,6 +74,7 @@ def get_aliases():
@api_bp.route("/v2/aliases", methods=["GET", "POST"])
@require_api_auth
+@limiter.limit("50/minute", key_func=lambda: g.user.id)
def get_aliases_v2():
    """
    Get aliases


@@ -17,9 +17,14 @@ from app.models import PlanEnum, AppleSubscription
_MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
_YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"

+# SL Mac app used to be in SL account
_MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
_MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"

+# SL Mac app is moved to Proton account
+_MACAPP_MONTHLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.monthly"
+_MACAPP_YEARLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.yearly"
+
# Apple API URL
_SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
_PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
@@ -263,7 +268,11 @@ def apple_update_notification():
            plan = (
                PlanEnum.monthly
                if transaction["product_id"]
-                in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
+                in (
+                    _MONTHLY_PRODUCT_ID,
+                    _MACAPP_MONTHLY_PRODUCT_ID,
+                    _MACAPP_MONTHLY_PRODUCT_ID_NEW,
+                )
                else PlanEnum.yearly
            )
@@ -517,7 +526,11 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
        plan = (
            PlanEnum.monthly
            if latest_transaction["product_id"]
-            in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
+            in (
+                _MONTHLY_PRODUCT_ID,
+                _MACAPP_MONTHLY_PRODUCT_ID,
+                _MACAPP_MONTHLY_PRODUCT_ID_NEW,
+            )
            else PlanEnum.yearly
        )


@@ -63,6 +63,11 @@ def auth_login():
    elif user.disabled:
        LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
        return jsonify(error="Account disabled"), 400
+    elif user.delete_on is not None:
+        LoginEvent(
+            LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
+        ).send()
+        return jsonify(error="Account scheduled for deletion"), 400
    elif not user.activated:
        LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
        return jsonify(error="Account not activated"), 422


@@ -45,7 +45,7 @@ def create_mailbox():
    mailbox_email = sanitize_email(request.get_json().get("email"))

    if not user.is_premium():
-        return jsonify(error=f"Only premium plan can add additional mailbox"), 400
+        return jsonify(error="Only premium plan can add additional mailbox"), 400

    if not is_valid_email(mailbox_email):
        return jsonify(error=f"{mailbox_email} invalid"), 400


@@ -150,7 +150,7 @@ def new_custom_alias_v3():
    if not data:
        return jsonify(error="request body cannot be empty"), 400

-    if type(data) is not dict:
+    if not isinstance(data, dict):
        return jsonify(error="request body does not follow the required format"), 400

    alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
@@ -168,7 +168,7 @@ def new_custom_alias_v3():
        return jsonify(error="alias prefix invalid format or too long"), 400

    # check if mailbox is not tempered with
-    if type(mailbox_ids) is not list:
+    if not isinstance(mailbox_ids, list):
        return jsonify(error="mailbox_ids must be an array of id"), 400

    mailboxes = []
    for mailbox_id in mailbox_ids:


@@ -32,6 +32,7 @@ def user_to_dict(user: User) -> dict:
        "in_trial": user.in_trial(),
        "max_alias_free_plan": user.max_alias_for_free_account(),
        "connected_proton_address": None,
+        "can_create_reverse_alias": user.can_create_contacts(),
    }

    if config.CONNECT_WITH_PROTON:
@@ -58,6 +59,7 @@ def user_info():
        - in_trial
        - max_alias_free
        - is_connected_with_proton
+        - can_create_reverse_alias
    """
    user = g.user


@@ -17,3 +17,23 @@ from .views import (
    recovery,
    api_to_cookie,
)
+
+__all__ = [
+    "login",
+    "logout",
+    "register",
+    "activate",
+    "resend_activation",
+    "reset_password",
+    "forgot_password",
+    "github",
+    "google",
+    "facebook",
+    "proton",
+    "change_email",
+    "mfa",
+    "fido",
+    "social",
+    "recovery",
+    "api_to_cookie",
+]


@@ -62,7 +62,7 @@ def fido():
    browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
    if browser and not browser.is_expired() and browser.user_id == user.id:
        login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
        # Redirect user to correct page
        return redirect(next_url or url_for("dashboard.index"))
    else:
@@ -110,7 +110,7 @@ def fido():
        session["sudo_time"] = int(time())

    login_user(user)
-    flash(f"Welcome back!", "success")
+    flash("Welcome back!", "success")

    # Redirect user to correct page
    response = make_response(redirect(next_url or url_for("dashboard.index")))


@@ -7,7 +7,7 @@ from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
from app.db import Session
from app.log import LOG
from app.models import User, File, SocialAuth
-from app.utils import random_string, sanitize_email
+from app.utils import random_string, sanitize_email, sanitize_next_url
from .login_utils import after_login

_authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
@@ -29,7 +29,7 @@ def google_login():
    # to avoid flask-login displaying the login error message
    session.pop("_flashes", None)

-    next_url = request.args.get("next")
+    next_url = sanitize_next_url(request.args.get("next"))

    # Google does not allow to append param to redirect_url
    # we need to pass the next url by session


@@ -54,6 +54,12 @@ def login():
                "error",
            )
            LoginEvent(LoginEvent.ActionType.disabled_login).send()
+        elif user.delete_on is not None:
+            flash(
+                f"Your account is scheduled to be deleted on {user.delete_on}",
+                "error",
+            )
+            LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
        elif not user.activated:
            show_resend_activation = True
            flash(


@@ -55,7 +55,7 @@ def mfa():
    browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
    if browser and not browser.is_expired() and browser.user_id == user.id:
        login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
        # Redirect user to correct page
        return redirect(next_url or url_for("dashboard.index"))
    else:
@@ -73,7 +73,7 @@ def mfa():
        Session.commit()

        login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")

        # Redirect user to correct page
        response = make_response(redirect(next_url or url_for("dashboard.index")))


@@ -53,7 +53,7 @@ def recovery_route():
            del session[MFA_USER_ID]

            login_user(user)
-            flash(f"Welcome back!", "success")
+            flash("Welcome back!", "success")

            recovery_code.used = True
            recovery_code.used_at = arrow.now()


@@ -94,9 +94,7 @@ def register():
        try:
            send_activation_email(user, next_url)
            RegisterEvent(RegisterEvent.ActionType.success).send()
-            DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
-                1
-            )
+            DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
            Session.commit()
        except Exception:
            flash("Invalid email, are you sure the email is correct?", "error")


@@ -179,6 +179,7 @@ AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
BUCKET = os.environ.get("BUCKET")
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)

# Paddle
try:
@@ -488,7 +489,34 @@ def setup_nameservers():

NAMESERVERS = setup_nameservers()

-DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = False
+DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = os.environ.get(
+    "DISABLE_CREATE_CONTACTS_FOR_FREE_USERS", False
+)
+
+
+# Expect format hits,seconds:hits,seconds...
+# Example 1,10:4,60 means 1 in the last 10 secs or 4 in the last 60 secs
+def getRateLimitFromConfig(
+    env_var: string, default: string = ""
+) -> list[tuple[int, int]]:
+    value = os.environ.get(env_var, default)
+    if not value:
+        return []
+    entries = [entry for entry in value.split(":")]
+    limits = []
+    for entry in entries:
+        fields = entry.split(",")
+        limit = (int(fields[0]), int(fields[1]))
+        limits.append(limit)
+    return limits
+
+
+ALIAS_CREATE_RATE_LIMIT_FREE = getRateLimitFromConfig(
+    "ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600"
+)
+ALIAS_CREATE_RATE_LIMIT_PAID = getRateLimitFromConfig(
+    "ALIAS_CREATE_RATE_LIMIT_PAID", "50,900:200,3600"
+)

PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
    FLASK_SECRET + "partnerapitoken"
)
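The two comment lines above describe the "hits,seconds:hits,seconds" format that getRateLimitFromConfig parses. A quick sketch of what the shipped defaults expand to, using the same parsing rule:

```python
def parse_rate_limit(value: str) -> list[tuple[int, int]]:
    # Same rule as getRateLimitFromConfig: "hits,seconds:hits,seconds..."
    if not value:
        return []
    return [(int(hits), int(secs)) for hits, secs in (e.split(",") for e in value.split(":"))]


print(parse_rate_limit("10,900:50,3600"))   # free default -> [(10, 900), (50, 3600)]
print(parse_rate_limit("50,900:200,3600"))  # paid default -> [(50, 900), (200, 3600)]
```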


@@ -33,3 +33,39 @@ from .views import (
    notification,
    support,
)
+
+__all__ = [
+    "index",
+    "pricing",
+    "setting",
+    "custom_alias",
+    "subdomain",
+    "billing",
+    "alias_log",
+    "alias_export",
+    "unsubscribe",
+    "api_key",
+    "custom_domain",
+    "alias_contact_manager",
+    "enter_sudo",
+    "mfa_setup",
+    "mfa_cancel",
+    "fido_setup",
+    "coupon",
+    "fido_manage",
+    "domain_detail",
+    "lifetime_licence",
+    "directory",
+    "mailbox",
+    "mailbox_detail",
+    "refused_email",
+    "referral",
+    "contact_detail",
+    "setup_done",
+    "batch_import",
+    "alias_transfer",
+    "app",
+    "delete_account",
+    "notification",
+    "support",
+]


@@ -51,14 +51,6 @@ def email_validator():
    return _check


-def user_can_create_contacts(user: User) -> bool:
-    if user.is_premium():
-        return True
-    if user.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
-        return True
-    return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
-
-
def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
    """
    Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
@@ -82,7 +74,7 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
    if contact:
        raise ErrContactAlreadyExists(contact)

-    if not user_can_create_contacts(user):
+    if not user.can_create_contacts():
        raise ErrContactErrorUpgradeNeeded()

    contact = Contact.create(
@@ -327,6 +319,6 @@ def alias_contact_manager(alias_id):
        last_page=last_page,
        query=query,
        nb_contact=nb_contact,
-        can_create_contacts=user_can_create_contacts(current_user),
+        can_create_contacts=current_user.can_create_contacts(),
        csrf_form=csrf_form,
    )


@@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
            contact=contact,
        )
        logs.append(al)

-    logs = sorted(logs, key=lambda l: l.when, reverse=True)
+    logs = sorted(logs, key=lambda log: log.when, reverse=True)

    return logs


@@ -7,79 +7,19 @@ from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user

from app import config
+from app.alias_utils import transfer_alias
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
-from app.email_utils import send_email, render
from app.extensions import limiter
from app.log import LOG
from app.models import (
    Alias,
-    Contact,
-    AliasUsedOn,
-    AliasMailbox,
-    User,
-    ClientUser,
)
from app.models import Mailbox
from app.utils import CSRFValidationForm


-def transfer(alias, new_user, new_mailboxes: [Mailbox]):
-    # cannot transfer alias which is used for receiving newsletter
-    if User.get_by(newsletter_alias_id=alias.id):
-        raise Exception("Cannot transfer alias that's used to receive newsletter")
-
-    # update user_id
-    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    # remove existing mailboxes from the alias
-    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
-
-    # set mailboxes
-    alias.mailbox_id = new_mailboxes.pop().id
-    for mb in new_mailboxes:
-        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
-
-    # alias has never been transferred before
-    if not alias.original_owner_id:
-        alias.original_owner_id = alias.user_id
-
-    # inform previous owner
-    old_user = alias.user
-    send_email(
-        old_user.email,
-        f"Alias {alias.email} has been received",
-        render(
-            "transactional/alias-transferred.txt",
-            alias=alias,
-        ),
-        render(
-            "transactional/alias-transferred.html",
-            alias=alias,
-        ),
-    )
-
-    # now the alias belongs to the new user
-    alias.user_id = new_user.id
-
-    # set some fields back to default
-    alias.disable_pgp = False
-    alias.pinned = False
-
-    Session.commit()
-
-
def hmac_alias_transfer_token(transfer_token: str) -> str:
    alias_hmac = hmac.new(
        config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
@@ -214,7 +154,7 @@ def alias_transfer_receive_route():
        mailboxes,
        token,
    )
-    transfer(alias, current_user, mailboxes)
+    transfer_alias(alias, current_user, mailboxes)

    # reset transfer token
    alias.transfer_token = None


@@ -1,14 +1,9 @@
-from app.db import Session
-
-"""
-List of apps that user has used via the "Sign in with SimpleLogin"
-"""
-
from flask import render_template, request, flash, redirect
from flask_login import login_required, current_user
from sqlalchemy.orm import joinedload

from app.dashboard.base import dashboard_bp
+from app.db import Session
from app.models import (
    ClientUser,
)
@@ -17,6 +12,10 @@ from app.models import (
@dashboard_bp.route("/app", methods=["GET", "POST"])
@login_required
def app_route():
+    """
+    List of apps that user has used via the "Sign in with SimpleLogin"
+    """
    client_users = (
        ClientUser.filter_by(user_id=current_user.id)
        .options(joinedload(ClientUser.client))


@@ -100,7 +100,7 @@ def coupon_route():
                commit=True,
            )
            flash(
-                f"Your account has been upgraded to Premium, thanks for your support!",
+                "Your account has been upgraded to Premium, thanks for your support!",
                "success",
            )


@@ -24,6 +24,7 @@ from app.models import (
    AliasMailbox,
    DomainDeletedAlias,
)
+from app.utils import CSRFValidationForm


@dashboard_bp.route("/custom_alias", methods=["GET", "POST"])
@@ -48,9 +49,13 @@ def custom_alias():
            at_least_a_premium_domain = True
            break

+    csrf_form = CSRFValidationForm()
    mailboxes = current_user.mailboxes()

    if request.method == "POST":
+        if not csrf_form.validate():
+            flash("Invalid request", "warning")
+            return redirect(request.url)
        alias_prefix = request.form.get("prefix").strip().lower().replace(" ", "")
        signed_alias_suffix = request.form.get("signed-alias-suffix")
        mailbox_ids = request.form.getlist("mailboxes")
@@ -164,4 +169,5 @@ def custom_alias():
        alias_suffixes=alias_suffixes,
        at_least_a_premium_domain=at_least_a_premium_domain,
        mailboxes=mailboxes,
+        csrf_form=csrf_form,
    )


@@ -67,7 +67,7 @@ def directory():
    if request.method == "POST":
        if request.form.get("form-name") == "delete":
            if not delete_dir_form.validate():
-                flash(f"Invalid request", "warning")
+                flash("Invalid request", "warning")
                return redirect(url_for("dashboard.directory"))
            dir_obj = Directory.get(delete_dir_form.directory_id.data)
@@ -87,7 +87,7 @@ def directory():
        if request.form.get("form-name") == "toggle-directory":
            if not toggle_dir_form.validate():
-                flash(f"Invalid request", "warning")
+                flash("Invalid request", "warning")
                return redirect(url_for("dashboard.directory"))
            dir_id = toggle_dir_form.directory_id.data
            dir_obj = Directory.get(dir_id)
@@ -109,7 +109,7 @@ def directory():
        elif request.form.get("form-name") == "update":
            if not update_dir_form.validate():
-                flash(f"Invalid request", "warning")
+                flash("Invalid request", "warning")
                return redirect(url_for("dashboard.directory"))
            dir_id = update_dir_form.directory_id.data
            dir_obj = Directory.get(dir_id)


@@ -52,12 +52,13 @@ def get_stats(user: User) -> Stats:

@dashboard_bp.route("/", methods=["GET", "POST"])
+@login_required
@limiter.limit(
    ALIAS_LIMIT,
    methods=["POST"],
    exempt_when=lambda: request.form.get("form-name") != "create-random-email",
)
-@login_required
+@limiter.limit("10/minute", methods=["GET"], key_func=lambda: current_user.id)
@parallel_limiter.lock(
    name="alias_creation",
    only_when=lambda: request.form.get("form-name") == "create-random-email",


@@ -128,7 +128,6 @@ def setting():
            new_email_valid = True
            new_email = canonicalize_email(change_email_form.email.data)
            if new_email != current_user.email and not pending_email:
                # check if this email is not already used
                if personal_email_already_used(new_email) or Alias.get_by(
                    email=new_email


@@ -75,12 +75,11 @@ def block_contact(contact_id):

@dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
@login_required
def encoded_unsubscribe(encoded_request: str):
    unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
        current_user, encoded_request
    )
    if not unsub_data:
-        flash(f"Invalid unsubscribe request", "error")
+        flash("Invalid unsubscribe request", "error")
        return redirect(url_for("dashboard.index"))
    if unsub_data.action == UnsubscribeAction.DisableAlias:
        alias = Alias.get(unsub_data.data)
@@ -97,14 +96,14 @@ def encoded_unsubscribe(encoded_request: str):
            )
        )
    if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
-        flash(f"You've unsubscribed from the newsletter", "success")
+        flash("You've unsubscribed from the newsletter", "success")
        return redirect(
            url_for(
                "dashboard.index",
            )
        )
    if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
-        flash(f"The original unsubscribe request has been forwarded", "success")
+        flash("The original unsubscribe request has been forwarded", "success")
        return redirect(
            url_for(
                "dashboard.index",


@@ -1 +1,3 @@
from .views import index, new_client, client_detail
+
+__all__ = ["index", "new_client", "client_detail"]


@@ -87,7 +87,7 @@ def client_detail(client_id):
        )

        flash(
-            f"Thanks for submitting, we are informed and will come back to you asap!",
+            "Thanks for submitting, we are informed and will come back to you asap!",
            "success",
        )


@@ -1 +1,3 @@
from .views import index
+
+__all__ = ["index"]


@@ -93,7 +93,7 @@ def send_welcome_email(user):
    send_email(
        comm_email,
-        f"Welcome to SimpleLogin",
+        "Welcome to SimpleLogin",
        render("com/welcome.txt", user=user, alias=alias),
        render("com/welcome.html", user=user, alias=alias),
        unsubscribe_link,
@@ -104,7 +104,7 @@ def send_welcome_email(user):
def send_trial_end_soon_email(user):
    send_email(
        user.email,
-        f"Your trial will end soon",
+        "Your trial will end soon",
        render("transactional/trial-end.txt.jinja2", user=user),
        render("transactional/trial-end.html", user=user),
        ignore_smtp_error=True,
@@ -114,7 +114,7 @@ def send_trial_end_soon_email(user):
def send_activation_email(email, activation_link):
    send_email(
        email,
-        f"Just one more step to join SimpleLogin",
+        "Just one more step to join SimpleLogin",
        render(
            "transactional/activation.txt",
            activation_link=activation_link,
@@ -583,6 +583,26 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
        LOG.d("MX Domain %s %s is invalid mailbox domain", mx_domain, domain)
        return False

+    existing_user = User.get_by(email=email_address)
+    if existing_user and existing_user.disabled:
+        LOG.d(
+            f"User {existing_user} is disabled. {email_address} cannot be used for other mailbox"
+        )
+        return False
+
+    for existing_user in (
+        User.query()
+        .join(Mailbox, User.id == Mailbox.user_id)
+        .filter(Mailbox.email == email_address)
+        .group_by(User.id)
+        .all()
+    ):
+        if existing_user.disabled:
+            LOG.d(
+                f"User {existing_user} is disabled and has a mailbox with {email_address}. Id cannot be used for other mailbox"
+            )
+            return False
+
    return True
@@ -768,7 +788,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
    ret = ""
    for to_decoded_str, charset in decode_header(header):
        if charset is None:
-            if type(to_decoded_str) is bytes:
+            if isinstance(to_decoded_str, bytes):
                decoded_str = to_decoded_str.decode()
            else:
                decoded_str = to_decoded_str
@@ -805,13 +825,13 @@ def to_bytes(msg: Message):
    for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
        try:
            return msg.as_bytes(policy=generator_policy)
-        except:
+        except Exception:
            LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)

    msg_string = msg.as_string()
    try:
        return msg_string.encode()
-    except:
+    except Exception:
        LOG.w("as_string().encode() fails", exc_info=True)

    return msg_string.encode(errors="replace")
@@ -906,7 +926,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
    if content_type == "text/plain":
        encoding = get_encoding(msg)
        payload = msg.get_payload()
-        if type(payload) is str:
+        if isinstance(payload, str):
            clone_msg = copy(msg)
            new_payload = f"""{text_header}
------------------------------
@@ -916,7 +936,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
    elif content_type == "text/html":
        encoding = get_encoding(msg)
        payload = msg.get_payload()
-        if type(payload) is str:
+        if isinstance(payload, str):
            new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
  -premailer-cellspacing: 0; margin: 0; padding: 0;">
    <tr>
@@ -972,7 +992,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
-    if type(msg) is str:
+    if isinstance(msg, str):
        msg = msg.replace(old, new)
        return msg
@@ -995,7 +1015,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
    if content_type in ("text/plain", "text/html"):
        encoding = get_encoding(msg)
        payload = msg.get_payload()
-        if type(payload) is str:
+        if isinstance(payload, str):
            if encoding == EmailEncoding.QUOTED:
                LOG.d("handle quoted-printable replace %s -> %s", old, new)
                # first decode the payload


@@ -9,6 +9,7 @@ class LoginEvent:
        failed = 1
        disabled_login = 2
        not_activated = 3
+        scheduled_to_be_deleted = 4

    class Source(EnumE):
        web = 0


@@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(
    from_header = get_header_unicode(msg[headers.FROM])

-    warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
+    warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
More info on https://simplelogin.io/docs/getting-started/anti-phishing/
"""
-    warning_html = f"""
+    warning_html = """
<p style="color:red">
    This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
    More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
@@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> EmailLog:
    refused_email = RefusedEmail.create(
        full_report_path=s3_report_path, user_id=alias.user_id, flush=True
    )
-    return EmailLog.create(
+    email_log = EmailLog.create(
        user_id=alias.user_id,
        mailbox_id=alias.mailbox_id,
        contact_id=contact.id,
@@ -142,6 +142,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> EmailLog:
        blocked=True,
        commit=True,
    )
+    return email_log


def apply_dmarc_policy_for_reply_phase(
def apply_dmarc_policy_for_reply_phase( def apply_dmarc_policy_for_reply_phase(


@@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
        return True

    if is_deleted_alias(msg_info.sender_address):
-        LOG.i(f"Complaint is for deleted alias. Do nothing")
+        LOG.i("Complaint is for deleted alias. Do nothing")
        return True

    contact = Contact.get_by(reply_email=msg_info.sender_address)
@@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
    alias = find_alias_with_address(msg_info.rcpt_address)

    if is_deleted_alias(msg_info.rcpt_address):
-        LOG.i(f"Complaint is for deleted alias. Do nothing")
+        LOG.i("Complaint is for deleted alias. Do nothing")
        return True

    if not alias:


@@ -54,9 +54,8 @@ class UnsubscribeEncoder:
    def encode_subject(
        cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
    ) -> str:
-        if (
-            action != UnsubscribeAction.OriginalUnsubscribeMailto
-            and type(data) is not int
+        if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
+            data, int
        ):
            raise ValueError(f"Data has to be an int for an action of type {action}")
        if action == UnsubscribeAction.OriginalUnsubscribeMailto:


@@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
    LOG.d("Download file %s from %s", batch_import.file, file_url)
    r = requests.get(file_url)

-    lines = [line.decode() for line in r.iter_lines()]
+    lines = [line.decode("utf-8") for line in r.iter_lines()]

    import_from_csv(batch_import, user, lines)


@@ -1,2 +1,4 @@
from .integrations import set_enable_proton_cookie
from .exit_sudo import exit_sudo_mode
+
+__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]


@@ -39,7 +39,6 @@ from app.models import (

class ExportUserDataJob:
    REMOVE_FIELDS = {
        "User": ("otp_secret", "password"),
        "Alias": ("ts_vector", "transfer_token", "hibp_last_check"),


@@ -22,7 +22,6 @@ from app.message_utils import message_to_bytes, message_format_base64_parts

@dataclass
class SendRequest:
    SAVE_EXTENSION = "sendrequest"

    envelope_from: str


@ -27,7 +27,7 @@ from sqlalchemy.orm import deferred
from sqlalchemy.sql import and_ from sqlalchemy.sql import and_
from sqlalchemy_utils import ArrowType from sqlalchemy_utils import ArrowType
from app import config from app import config, rate_limiter
from app import s3 from app import s3
from app.db import Session from app.db import Session
from app.dns_utils import get_mx_domains from app.dns_utils import get_mx_domains
@ -235,6 +235,7 @@ class AuditLogActionEnum(EnumE):
download_provider_complaint = 8 download_provider_complaint = 8
disable_user = 9 disable_user = 9
enable_user = 10 enable_user = 10
stop_trial = 11
class Phase(EnumE): class Phase(EnumE):
@ -280,6 +281,7 @@ class IntEnumType(sa.types.TypeDecorator):
class AliasOptions: class AliasOptions:
show_sl_domains: bool = True show_sl_domains: bool = True
show_partner_domains: Optional[Partner] = None show_partner_domains: Optional[Partner] = None
show_partner_premium: Optional[bool] = None
class Hibp(Base, ModelMixin): class Hibp(Base, ModelMixin):
@ -539,10 +541,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
nullable=False, nullable=False,
) )
# Trigger hard deletion of the account at this time
delete_on = sa.Column(ArrowType, default=None)
__table_args__ = ( __table_args__ = (
sa.Index( sa.Index(
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
), ),
sa.Index("ix_users_delete_on", delete_on),
) )
@property @property
@ -721,6 +727,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return True return True
def is_active(self) -> bool:
if self.delete_on is None:
return True
return self.delete_on < arrow.now()
def in_trial(self): def in_trial(self):
"""return True if user does not have lifetime licence or an active subscription AND is in trial period""" """return True if user does not have lifetime licence or an active subscription AND is in trial period"""
if self.lifetime_or_active_subscription(): if self.lifetime_or_active_subscription():
@ -822,6 +833,9 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
Whether user can create a new alias. User can't create a new alias if Whether user can create a new alias. User can't create a new alias if
- has more than 15 aliases in the free plan, *even in the free trial* - has more than 15 aliases in the free plan, *even in the free trial*
""" """
if not self.is_active():
return False
if self.disabled: if self.disabled:
return False return False
@ -833,6 +847,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
< self.max_alias_for_free_account() < self.max_alias_for_free_account()
) )
def can_send_or_receive(self) -> bool:
if self.disabled:
LOG.i(f"User {self} is disabled. Cannot receive or send emails")
return False
if self.delete_on is not None:
LOG.i(
f"User {self} is scheduled to be deleted. Cannot receive or send emails"
)
return False
return True
def profile_picture_url(self): def profile_picture_url(self):
if self.profile_picture_id: if self.profile_picture_id:
return self.profile_picture.get_url() return self.profile_picture.get_url()
@ -891,7 +916,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return sub return sub
def verified_custom_domains(self) -> List["CustomDomain"]: def verified_custom_domains(self) -> List["CustomDomain"]:
return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all() return (
CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
.order_by(CustomDomain.domain.asc())
.all()
)
def mailboxes(self) -> List["Mailbox"]: def mailboxes(self) -> List["Mailbox"]:
"""list of mailbox that user own""" """list of mailbox that user own"""
@ -1023,29 +1052,35 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
) -> list["SLDomain"]: ) -> list["SLDomain"]:
if alias_options is None: if alias_options is None:
alias_options = AliasOptions() alias_options = AliasOptions()
-        conditions = [SLDomain.hidden == False]  # noqa: E712
-        if not self.is_premium():
-            conditions.append(SLDomain.premium_only == False)  # noqa: E712
-        partner_domain_cond = []  # noqa:E711
+        top_conds = [SLDomain.hidden == False]  # noqa: E712
+        or_conds = []  # noqa:E711
         if self.default_alias_public_domain_id is not None:
-            partner_domain_cond.append(
-                SLDomain.id == self.default_alias_public_domain_id
-            )
+            default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
+            if not self.is_premium():
+                default_domain_conds.append(
+                    SLDomain.premium_only == False  # noqa: E712
+                )
+            or_conds.append(and_(*default_domain_conds).self_group())
         if alias_options.show_partner_domains is not None:
             partner_user = PartnerUser.filter_by(
                 user_id=self.id, partner_id=alias_options.show_partner_domains.id
             ).first()
             if partner_user is not None:
-                partner_domain_cond.append(
-                    SLDomain.partner_id == partner_user.partner_id
-                )
+                partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
+                if alias_options.show_partner_premium is None:
+                    alias_options.show_partner_premium = self.is_premium()
+                if not alias_options.show_partner_premium:
+                    partner_domain_cond.append(
+                        SLDomain.premium_only == False  # noqa: E712
+                    )
+                or_conds.append(and_(*partner_domain_cond).self_group())
         if alias_options.show_sl_domains:
-            partner_domain_cond.append(SLDomain.partner_id == None)  # noqa:E711
-        if len(partner_domain_cond) == 1:
-            conditions.append(partner_domain_cond[0])
-        else:
-            conditions.append(or_(*partner_domain_cond))
-        query = Session.query(SLDomain).filter(*conditions).order_by(SLDomain.order)
+            sl_conds = [SLDomain.partner_id == None]  # noqa: E711
+            if not self.is_premium():
+                sl_conds.append(SLDomain.premium_only == False)  # noqa: E712
+            or_conds.append(and_(*sl_conds).self_group())
+        top_conds.append(or_(*or_conds))
+        query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
         return query.all()
def available_alias_domains( def available_alias_domains(
@ -1091,6 +1126,13 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return random_words(1) return random_words(1)
def can_create_contacts(self) -> bool:
if self.is_premium():
return True
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
def __repr__(self): def __repr__(self):
return f"<User {self.id} {self.name} {self.email}>" return f"<User {self.id} {self.name} {self.email}>"
@ -1466,6 +1508,8 @@ class Alias(Base, ModelMixin):
TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True) TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
) )
last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
__table_args__ = ( __table_args__ = (
Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"), Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
# index on note column using pg_trgm # index on note column using pg_trgm
@ -1484,6 +1528,7 @@ class Alias(Base, ModelMixin):
def mailboxes(self): def mailboxes(self):
ret = [self.mailbox] ret = [self.mailbox]
for m in self._mailboxes: for m in self._mailboxes:
if m.id is not self.mailbox.id:
ret.append(m) ret.append(m)
ret = [mb for mb in ret if mb.verified] ret = [mb for mb in ret if mb.verified]
@ -1533,6 +1578,15 @@ class Alias(Base, ModelMixin):
flush = kw.pop("flush", False) flush = kw.pop("flush", False)
new_alias = cls(**kw) new_alias = cls(**kw)
user = User.get(new_alias.user_id)
if user.is_premium():
limits = config.ALIAS_CREATE_RATE_LIMIT_PAID
else:
limits = config.ALIAS_CREATE_RATE_LIMIT_FREE
# limits is array of (hits,days)
for limit in limits:
key = f"alias_create_{limit[1]}d:{user.id}"
rate_limiter.check_bucket_limit(key, limit[0], limit[1])
email = kw["email"] email = kw["email"]
# make sure email is lowercase and doesn't have any whitespace # make sure email is lowercase and doesn't have any whitespace
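The new check in Alias.create reads the rate-limit configuration as a list of (hits, days) tuples and derives one Redis bucket key per tuple. A minimal sketch of that format and key derivation, using made-up limit values and user id (the project's real defaults are not part of this diff):

    # Assumed values for illustration only - not the real defaults.
    ALIAS_CREATE_RATE_LIMIT_FREE = [(10, 1), (50, 30)]   # (hits, days)
    ALIAS_CREATE_RATE_LIMIT_PAID = [(50, 1), (200, 30)]

    user_id = 42  # hypothetical user
    for hits, days in ALIAS_CREATE_RATE_LIMIT_FREE:
        key = f"alias_create_{days}d:{user_id}"
        # check_bucket_limit(key, hits, days) raises TooManyRequests once the
        # counter behind `key` exceeds `hits` within the current bucket.
        print(key, hits)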
@ -1925,6 +1979,7 @@ class Contact(Base, ModelMixin):
class EmailLog(Base, ModelMixin): class EmailLog(Base, ModelMixin):
__tablename__ = "email_log" __tablename__ = "email_log"
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
user_id = sa.Column( user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
@ -2014,6 +2069,20 @@ class EmailLog(Base, ModelMixin):
def get_dashboard_url(self): def get_dashboard_url(self):
return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}" return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"
@classmethod
def create(cls, *args, **kwargs):
commit = kwargs.pop("commit", False)
email_log = super().create(*args, **kwargs)
Session.flush()
if "alias_id" in kwargs:
sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
Session.execute(
sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
)
if commit:
Session.commit()
return email_log
def __repr__(self): def __repr__(self):
return f"<EmailLog {self.id}>" return f"<EmailLog {self.id}>"
@ -2576,6 +2645,7 @@ class Mailbox(Base, ModelMixin):
self.email.endswith("@proton.me") self.email.endswith("@proton.me")
or self.email.endswith("@protonmail.com") or self.email.endswith("@protonmail.com")
or self.email.endswith("@protonmail.ch") or self.email.endswith("@protonmail.ch")
or self.email.endswith("@proton.ch")
or self.email.endswith("@pm.me") or self.email.endswith("@pm.me")
): ):
return True return True
@ -3160,7 +3230,7 @@ class MessageIDMatching(Base, ModelMixin):
# to track what email_log that has created this matching # to track what email_log that has created this matching
email_log_id = sa.Column( email_log_id = sa.Column(
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
) )
email_log = orm.relationship("EmailLog") email_log = orm.relationship("EmailLog")
@ -3298,6 +3368,15 @@ class AdminAuditLog(Base):
}, },
) )
@classmethod
def stop_trial(cls, admin_user_id: int, user_id: int):
cls.create(
admin_user_id=admin_user_id,
action=AuditLogActionEnum.stop_trial.value,
model="User",
model_id=user_id,
)
@classmethod @classmethod
def disable_otp_fido( def disable_otp_fido(
cls, admin_user_id: int, user_id: int, had_otp: bool, had_fido: bool cls, admin_user_id: int, user_id: int, had_otp: bool, had_fido: bool
@ -3493,7 +3572,7 @@ class PartnerSubscription(Base, ModelMixin):
class Newsletter(Base, ModelMixin): class Newsletter(Base, ModelMixin):
__tablename__ = "newsletter" __tablename__ = "newsletter"
subject = sa.Column(sa.String(), nullable=False, unique=True, index=True) subject = sa.Column(sa.String(), nullable=False, index=True)
html = sa.Column(sa.Text) html = sa.Column(sa.Text)
plain_text = sa.Column(sa.Text) plain_text = sa.Column(sa.Text)


@ -1 +1,3 @@
from . import views from . import views
__all__ = ["views"]


@ -1 +1,3 @@
from .views import authorize, token, user_info from .views import authorize, token, user_info
__all__ = ["authorize", "token", "user_info"]


@ -140,7 +140,7 @@ def authorize():
Scope=Scope, Scope=Scope,
) )
else: # POST - user allows or denies else: # POST - user allows or denies
if not current_user.is_authenticated or not current_user.is_active: if not current_user.is_authenticated or not current_user.is_active():
LOG.i( LOG.i(
"Attempt to validate a OAUth allow request by an unauthenticated user" "Attempt to validate a OAUth allow request by an unauthenticated user"
) )


@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
- the response_type/scope passed as a list ?scope=scope_1&scope=scope_2 - the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
""" """
res = set() res = set()
if type(arg_input) is str: if isinstance(arg_input, str):
if " " in arg_input: if " " in arg_input:
for x in arg_input.split(" "): for x in arg_input.split(" "):
if x: if x:


@ -5,3 +5,11 @@ from .views import (
account_activated, account_activated,
extension_redirect, extension_redirect,
) )
__all__ = [
"index",
"final",
"setup_done",
"account_activated",
"extension_redirect",
]


@ -39,7 +39,6 @@ class _InnerLock:
lock_redis.storage.delete(lock_name) lock_redis.storage.delete(lock_name)
def __call__(self, f: Callable[..., Any]): def __call__(self, f: Callable[..., Any]):
if self.lock_suffix is None: if self.lock_suffix is None:
lock_suffix = f.__name__ lock_suffix = f.__name__
else: else:


@ -5,3 +5,11 @@ from .views import (
provider1_callback, provider1_callback,
provider2_callback, provider2_callback,
) )
__all__ = [
"index",
"phone_reservation",
"twilio_callback",
"provider1_callback",
"provider2_callback",
]

app/app/rate_limiter.py Normal file

@ -0,0 +1,40 @@
from datetime import datetime
from typing import Optional
import newrelic.agent
import redis.exceptions
import werkzeug.exceptions
from limits.storage import RedisStorage
from app.log import LOG
lock_redis: Optional[RedisStorage] = None
def set_redis_concurrent_lock(redis: RedisStorage):
global lock_redis
lock_redis = redis
def check_bucket_limit(
lock_name: Optional[str] = None,
max_hits: int = 5,
bucket_seconds: int = 3600,
):
# Calculate current bucket time
int_time = int(datetime.utcnow().timestamp())
bucket_id = int_time - (int_time % bucket_seconds)
bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
if not lock_redis:
return
try:
value = lock_redis.incr(bucket_lock_name, bucket_seconds)
if value > max_hits:
LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
newrelic.agent.record_custom_event(
"BucketRateLimit",
{"lock_name": lock_name, "bucket_seconds": bucket_seconds},
)
raise werkzeug.exceptions.TooManyRequests()
except (redis.exceptions.RedisError, AttributeError):
LOG.e("Cannot connect to redis")


@ -2,21 +2,23 @@ import flask
import limits.storage import limits.storage
from app.parallel_limiter import set_redis_concurrent_lock from app.parallel_limiter import set_redis_concurrent_lock
from app.rate_limiter import set_redis_concurrent_lock as rate_limit_set_redis
from app.session import RedisSessionStore from app.session import RedisSessionStore
def initialize_redis_services(app: flask.Flask, redis_url: str): def initialize_redis_services(app: flask.Flask, redis_url: str):
if redis_url.startswith("redis://") or redis_url.startswith("rediss://"): if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
storage = limits.storage.RedisStorage(redis_url) storage = limits.storage.RedisStorage(redis_url)
app.session_interface = RedisSessionStore(storage.storage, storage.storage, app) app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
set_redis_concurrent_lock(storage) set_redis_concurrent_lock(storage)
rate_limit_set_redis(storage)
elif redis_url.startswith("redis+sentinel://"): elif redis_url.startswith("redis+sentinel://"):
storage = limits.storage.RedisSentinelStorage(redis_url) storage = limits.storage.RedisSentinelStorage(redis_url)
app.session_interface = RedisSessionStore( app.session_interface = RedisSessionStore(
storage.storage, storage.storage_slave, app storage.storage, storage.storage_slave, app
) )
set_redis_concurrent_lock(storage) set_redis_concurrent_lock(storage)
rate_limit_set_redis(storage)
else: else:
raise RuntimeError( raise RuntimeError(
f"Tried to set_redis_session with an invalid redis url: ${redis_url}" f"Tried to set_redis_session with an invalid redis url: ${redis_url}"


@ -13,17 +13,29 @@ from app.config import (
LOCAL_FILE_UPLOAD, LOCAL_FILE_UPLOAD,
UPLOAD_DIR, UPLOAD_DIR,
URL, URL,
AWS_ENDPOINT_URL,
) )
from app.log import LOG
if not LOCAL_FILE_UPLOAD:
_session = boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
)
def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"): _s3_client = None
def _get_s3client():
global _s3_client
if _s3_client is None:
args = {
"aws_access_key_id": AWS_ACCESS_KEY_ID,
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
"region_name": AWS_REGION,
}
if AWS_ENDPOINT_URL:
args["endpoint_url"] = AWS_ENDPOINT_URL
_s3_client = boto3.client("s3", **args)
return _s3_client
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
bs.seek(0) bs.seek(0)
if LOCAL_FILE_UPLOAD: if LOCAL_FILE_UPLOAD:
@ -34,7 +46,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
f.write(bs.read()) f.write(bs.read())
else: else:
_session.resource("s3").Bucket(BUCKET).put_object( _get_s3client().put_object(
Bucket=BUCKET,
Key=key, Key=key,
Body=bs, Body=bs,
ContentType=content_type, ContentType=content_type,
@ -52,7 +65,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
f.write(bs.read()) f.write(bs.read())
else: else:
_session.resource("s3").Bucket(BUCKET).put_object( _get_s3client().put_object(
Bucket=BUCKET,
Key=path, Key=path,
Body=bs, Body=bs,
# Support saving a remote file using Http header # Support saving a remote file using Http header
@ -67,13 +81,10 @@ def download_email(path: str) -> Optional[str]:
file_path = os.path.join(UPLOAD_DIR, path) file_path = os.path.join(UPLOAD_DIR, path)
with open(file_path, "rb") as f: with open(file_path, "rb") as f:
return f.read() return f.read()
-    resp = (
-        _session.resource("s3")
-        .Bucket(BUCKET)
-        .get_object(
-            Key=path,
-        )
-    )
+    resp = _get_s3client().get_object(
+        Bucket=BUCKET,
+        Key=path,
+    )
if not resp or "Body" not in resp: if not resp or "Body" not in resp:
return None return None
return resp["Body"].read return resp["Body"].read
@ -88,8 +99,7 @@ def get_url(key: str, expires_in=3600) -> str:
if LOCAL_FILE_UPLOAD: if LOCAL_FILE_UPLOAD:
return URL + "/static/upload/" + key return URL + "/static/upload/" + key
else: else:
s3_client = _session.client("s3") return _get_s3client().generate_presigned_url(
return s3_client.generate_presigned_url(
ExpiresIn=expires_in, ExpiresIn=expires_in,
ClientMethod="get_object", ClientMethod="get_object",
Params={"Bucket": BUCKET, "Key": key}, Params={"Bucket": BUCKET, "Key": key},
@ -100,5 +110,15 @@ def delete(path: str):
if LOCAL_FILE_UPLOAD: if LOCAL_FILE_UPLOAD:
os.remove(os.path.join(UPLOAD_DIR, path)) os.remove(os.path.join(UPLOAD_DIR, path))
else: else:
o = _session.resource("s3").Bucket(BUCKET).Object(path) _get_s3client().delete_object(Bucket=BUCKET, Key=path)
o.delete()
def create_bucket_if_not_exists():
s3client = _get_s3client()
buckets = s3client.list_buckets()
for bucket in buckets["Buckets"]:
if bucket["Name"] == BUCKET:
LOG.i("Bucket already exists")
return
s3client.create_bucket(Bucket=BUCKET)
LOG.i(f"Bucket {BUCKET} created")


@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
try: try:
data = pickle.loads(val) data = pickle.loads(val)
return ServerSession(data, session_id=session_id) return ServerSession(data, session_id=session_id)
except: except Exception:
pass pass
return ServerSession(session_id=str(uuid.uuid4())) return ServerSession(session_id=str(uuid.uuid4()))


@ -49,11 +49,11 @@ def random_string(length=10, include_digits=False):
def convert_to_id(s: str): def convert_to_id(s: str):
"""convert a string to id-like: remove space, remove special accent""" """convert a string to id-like: remove space, remove special accent"""
s = s.replace(" ", "")
s = s.lower() s = s.lower()
s = unidecode(s) s = unidecode(s)
s = s.replace(" ", "")
return s return s[:256]
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-." _ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-."
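convert_to_id now lowercases and transliterates before stripping spaces, and caps the result at 256 characters. A quick illustration (behaviour inferred from the snippet above):

    from unidecode import unidecode

    def convert_to_id(s: str):
        s = s.lower()
        s = unidecode(s)
        s = s.replace(" ", "")
        return s[:256]

    print(convert_to_id("São Paulo"))  # -> "saopaulo"; very long inputs are truncated to 256 chars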


@ -5,11 +5,11 @@ from typing import List, Tuple
import arrow import arrow
import requests import requests
from sqlalchemy import func, desc, or_ from sqlalchemy import func, desc, or_, and_
from sqlalchemy.ext.compiler import compiles from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import joinedload from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import ObjectDeletedError from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.sql import Insert from sqlalchemy.sql import Insert, text
from app import s3, config from app import s3, config
from app.alias_utils import nb_email_log_for_mailbox from app.alias_utils import nb_email_log_for_mailbox
@ -62,6 +62,8 @@ from app.proton.utils import get_proton_partner
from app.utils import sanitize_email from app.utils import sanitize_email
from server import create_light_app from server import create_light_app
DELETE_GRACE_DAYS = 30
def notify_trial_end(): def notify_trial_end():
for user in User.filter( for user in User.filter(
@ -85,23 +87,43 @@ def delete_logs():
delete_refused_emails() delete_refused_emails()
delete_old_monitoring() delete_old_monitoring()
for t in TransactionalEmail.filter( for t_email in TransactionalEmail.filter(
TransactionalEmail.created_at < arrow.now().shift(days=-7) TransactionalEmail.created_at < arrow.now().shift(days=-7)
): ):
TransactionalEmail.delete(t.id) TransactionalEmail.delete(t_email.id)
for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)): for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
Bounce.delete(b.id) Bounce.delete(b.id)
Session.commit() Session.commit()
LOG.d("Delete EmailLog older than 2 weeks") LOG.d("Deleting EmailLog older than 2 weeks")
max_dt = arrow.now().shift(weeks=-2) total_deleted = 0
nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete() batch_size = 500
Session.execute("set session statement_timeout=30000").rowcount
queries_done = 0
cutoff_time = arrow.now().shift(days=-14)
rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
expected_queries = int(rows_to_delete / batch_size)
sql = text(
"DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
)
str_cutoff_time = cutoff_time.isoformat()
while total_deleted < rows_to_delete:
deleted_count = Session.execute(
sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
).rowcount
Session.commit() Session.commit()
total_deleted += deleted_count
queries_done += 1
LOG.i(
f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
)
if deleted_count < batch_size:
break
LOG.i("Delete %s email logs", nb_deleted) LOG.i("Deleted %s email logs", total_deleted)
def delete_refused_emails(): def delete_refused_emails():
@ -141,7 +163,7 @@ def notify_premium_end():
send_email( send_email(
user.email, user.email,
f"Your subscription will end soon", "Your subscription will end soon",
render( render(
"transactional/subscription-end.txt", "transactional/subscription-end.txt",
user=user, user=user,
@ -198,7 +220,7 @@ def notify_manual_sub_end():
LOG.d("Remind user %s that their manual sub is ending soon", user) LOG.d("Remind user %s that their manual sub is ending soon", user)
send_email( send_email(
user.email, user.email,
f"Your subscription will end soon", "Your subscription will end soon",
render( render(
"transactional/manual-subscription-end.txt", "transactional/manual-subscription-end.txt",
user=user, user=user,
@ -570,21 +592,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
""" """
monitoring_report += "\n====================================\n" monitoring_report += "\n====================================\n"
monitoring_report += f""" monitoring_report += """
# Account bounce report: # Account bounce report:
""" """
for email, bounces in bounce_report(): for email, bounces in bounce_report():
monitoring_report += f"{email}: {bounces}\n" monitoring_report += f"{email}: {bounces}\n"
monitoring_report += f"""\n monitoring_report += """\n
# Alias creation report: # Alias creation report:
""" """
for email, nb_alias, date in alias_creation_report(): for email, nb_alias, date in alias_creation_report():
monitoring_report += f"{email}, {date}: {nb_alias}\n" monitoring_report += f"{email}, {date}: {nb_alias}\n"
monitoring_report += f"""\n monitoring_report += """\n
# Full bounce detail report: # Full bounce detail report:
""" """
monitoring_report += all_bounce_report() monitoring_report += all_bounce_report()
@ -1079,14 +1101,14 @@ def notify_hibp():
) )
LOG.d( LOG.d(
f"Send new breaches found email to %s for %s breaches aliases", "Send new breaches found email to %s for %s breaches aliases",
user, user,
len(breached_aliases), len(breached_aliases),
) )
send_email( send_email(
user.email, user.email,
f"You were in a data breach", "You were in a data breach",
render( render(
"transactional/hibp-new-breaches.txt.jinja2", "transactional/hibp-new-breaches.txt.jinja2",
user=user, user=user,
@ -1106,6 +1128,23 @@ def notify_hibp():
Session.commit() Session.commit()
def clear_users_scheduled_to_be_deleted(dry_run=False):
users = User.filter(
and_(
User.delete_on.isnot(None),
User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
)
).all()
for user in users:
LOG.i(
f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
)
if dry_run:
continue
User.delete(user.id)
Session.commit()
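A small worked example of the cut-off used by clear_users_scheduled_to_be_deleted (dates are made up): a user is only purged once delete_on is at least DELETE_GRACE_DAYS in the past.

    import arrow

    DELETE_GRACE_DAYS = 30
    delete_on = arrow.get("2024-03-01")  # hypothetical scheduled date
    now = arrow.get("2024-04-02")
    print(delete_on <= now.shift(days=-DELETE_GRACE_DAYS))  # True: more than 30 days have passed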
if __name__ == "__main__": if __name__ == "__main__":
LOG.d("Start running cronjob") LOG.d("Start running cronjob")
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
@ -1172,3 +1211,6 @@ if __name__ == "__main__":
elif args.job == "send_undelivered_mails": elif args.job == "send_undelivered_mails":
LOG.d("Sending undelivered emails") LOG.d("Sending undelivered emails")
load_unsent_mails_from_fs_and_resend() load_unsent_mails_from_fs_and_resend()
elif args.job == "delete_scheduled_users":
LOG.d("Deleting users scheduled to be deleted")
clear_users_scheduled_to_be_deleted(dry_run=True)


@ -61,7 +61,12 @@ jobs:
schedule: "15 10 * * *" schedule: "15 10 * * *"
captureStderr: true captureStderr: true
- name: SimpleLogin delete users scheduled to be deleted
command: python /code/cron.py -j delete_scheduled_users
shell: /bin/bash
schedule: "15 11 * * *"
captureStderr: true
concurrencyPolicy: Forbid
- name: SimpleLogin send unsent emails - name: SimpleLogin send unsent emails
command: python /code/cron.py -j send_undelivered_mails command: python /code/cron.py -j send_undelivered_mails


@ -388,7 +388,7 @@ Input:
- (Optional but recommended) `hostname` passed in query string - (Optional but recommended) `hostname` passed in query string
- Request Message Body in json (`Content-Type` is `application/json`) - Request Message Body in json (`Content-Type` is `application/json`)
- alias_prefix: string. The first part of the alias that user can choose. - alias_prefix: string. The first part of the alias that user can choose.
- signed_suffix: should be one of the suffixes returned in the `GET /api/v4/alias/options` endpoint. - signed_suffix: should be one of the suffixes returned in the `GET /api/v5/alias/options` endpoint.
- mailbox_ids: list of mailbox_id that "owns" this alias - mailbox_ids: list of mailbox_id that "owns" this alias
- (Optional) note: alias note - (Optional) note: alias note
- (Optional) name: alias name - (Optional) name: alias name
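For reference, a hedged example of such a request; the endpoint path, header name and all values are assumptions recalled from the public API docs rather than taken from this hunk:

    import requests

    resp = requests.post(
        "https://app.simplelogin.io/api/v3/alias/custom/new?hostname=example.com",
        headers={"Authentication": "<api-key>"},
        json={
            "alias_prefix": "hello",
            "signed_suffix": "<one of the suffixes from GET /api/v5/alias/options>",
            "mailbox_ids": [1],
            "note": "created from the API",  # optional
            "name": "Hello",                 # optional
        },
    )
    print(resp.status_code)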


@ -235,17 +235,17 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
contact.mail_from = mail_from contact.mail_from = mail_from
Session.commit() Session.commit()
else: else:
try: try:
contact_email_for_reply = (
contact_email if is_valid_email(contact_email) else ""
)
contact = Contact.create( contact = Contact.create(
user_id=alias.user_id, user_id=alias.user_id,
alias_id=alias.id, alias_id=alias.id,
website_email=contact_email, website_email=contact_email,
name=contact_name, name=contact_name,
mail_from=mail_from, mail_from=mail_from,
reply_email=generate_reply_email(contact_email, alias) reply_email=generate_reply_email(contact_email_for_reply, alias),
if is_valid_email(contact_email)
else NOREPLY,
automatic_created=True, automatic_created=True,
) )
if not contact_email: if not contact_email:
@ -637,8 +637,12 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
user = alias.user user = alias.user
if user.disabled: if not user.is_active():
LOG.w("User %s disabled, disable forwarding emails for %s", user, alias) LOG.w(f"User {user} has been soft deleted")
return False, status.E502
if not user.can_send_or_receive():
LOG.i(f"User {user} cannot receive emails")
if should_ignore_bounce(envelope.mail_from): if should_ignore_bounce(envelope.mail_from):
return [(True, status.E207)] return [(True, status.E207)]
else: else:
@ -1056,6 +1060,9 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
if not contact: if not contact:
LOG.w(f"No contact with {reply_email} as reverse alias") LOG.w(f"No contact with {reply_email} as reverse alias")
return False, status.E502 return False, status.E502
if not contact.user.is_active():
LOG.w(f"User {contact.user} has been soft deleted")
return False, status.E502
alias = contact.alias alias = contact.alias
alias_address: str = contact.alias.email alias_address: str = contact.alias.email
@ -1070,13 +1077,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
user = alias.user user = alias.user
mail_from = envelope.mail_from mail_from = envelope.mail_from
if user.disabled: if not user.can_send_or_receive():
LOG.e( LOG.i(f"User {user} cannot send emails")
"User %s disabled, disable sending emails from %s to %s",
user,
alias,
contact,
)
return False, status.E504 return False, status.E504
# Check if we need to reject or quarantine based on dmarc # Check if we need to reject or quarantine based on dmarc
@ -1202,7 +1204,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
) )
# replace reverse alias by real address for all contacts # replace reverse alias by real address for all contacts
for (reply_email, website_email) in contact_query.values( for reply_email, website_email in contact_query.values(
Contact.reply_email, Contact.website_email Contact.reply_email, Contact.website_email
): ):
msg = replace(msg, reply_email, website_email) msg = replace(msg, reply_email, website_email)
@ -1257,7 +1259,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;": if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
# no need to replace TO header # no need to replace TO header
LOG.d("email is sent in BCC mode") LOG.d("email is sent in BCC mode")
del msg[headers.TO]
else: else:
replace_header_when_reply(msg, alias, headers.TO) replace_header_when_reply(msg, alias, headers.TO)
@ -1928,6 +1929,9 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
contact, contact,
alias, alias,
) )
if not email_log.user.is_active():
LOG.d(f"User {email_log.user} is not active")
return status.E510
if email_log.is_reply: if email_log.is_reply:
content_type = msg.get_content_type().lower() content_type = msg.get_content_type().lower()
@ -1958,7 +1962,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email): for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
res.append((is_delivered, smtp_status)) res.append((is_delivered, smtp_status))
for (is_success, smtp_status) in res: for is_success, smtp_status in res:
# Consider all deliveries successful if 1 delivery is successful # Consider all deliveries successful if 1 delivery is successful
if is_success: if is_success:
return smtp_status return smtp_status
@ -1989,6 +1993,9 @@ def send_no_reply_response(mail_from: str, msg: Message):
if not mailbox: if not mailbox:
LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY)) LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
return return
if not mailbox.user.is_active():
LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
return
send_email_at_most_times( send_email_at_most_times(
mailbox.user, mailbox.user,
ALERT_TO_NOREPLY, ALERT_TO_NOREPLY,
@ -2278,7 +2285,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
if nb_success > 0 and nb_non_success > 0: if nb_success > 0 and nb_non_success > 0:
LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}") LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")
for (is_success, smtp_status) in res: for is_success, smtp_status in res:
# Consider all deliveries successful if 1 delivery is successful # Consider all deliveries successful if 1 delivery is successful
if is_success: if is_success:
return smtp_status return smtp_status


@ -192,7 +192,6 @@ amigos
amines amines
amnion amnion
amoeba amoeba
amoral
amount amount
amours amours
ampere ampere
@ -215,7 +214,6 @@ animus
anions anions
ankles ankles
anklet anklet
annals
anneal anneal
annoys annoys
annual annual
@ -364,7 +362,6 @@ auntie
aureus aureus
aurora aurora
author author
autism
autumn autumn
avails avails
avatar avatar
@ -638,14 +635,12 @@ bigwig
bijoux bijoux
bikers bikers
biking biking
bikini
bilges bilges
bilked bilked
bilker bilker
billed billed
billet billet
billow billow
bimbos
binary binary
binder binder
binged binged
@ -710,8 +705,6 @@ blocks
blokes blokes
blonde blonde
blonds blonds
bloods
bloody
blooms blooms
bloops bloops
blotch blotch
@ -817,8 +810,6 @@ bounds
bounty bounty
bovine bovine
bovver bovver
bowels
bowers
bowing bowing
bowled bowled
bowleg bowleg
@ -827,10 +818,8 @@ bowman
bowmen bowmen
bowwow bowwow
boxcar boxcar
boxers
boxier boxier
boxing boxing
boyish
braced braced
bracer bracer
braces braces
@ -861,7 +850,6 @@ breach
breads breads
breaks breaks
breams breams
breast
breath breath
breech breech
breeds breeds
@ -872,9 +860,6 @@ brevet
brewed brewed
brewer brewer
briars briars
bribed
briber
bribes
bricks bricks
bridal bridal
brides brides
@ -926,13 +911,7 @@ buffed
buffer buffer
buffet buffet
bugged bugged
bugger
bugled
bugler
bugles
builds builds
bulged
bulges
bulked bulked
bulled bulled
bullet bullet
@ -1340,8 +1319,6 @@ clingy
clinic clinic
clinks clinks
clique clique
cloaca
cloaks
cloche cloche
clocks clocks
clomps clomps
@ -1448,7 +1425,6 @@ comply
compos compos
conchs conchs
concur concur
condom
condor condor
condos condos
coneys coneys
@ -1568,8 +1544,6 @@ cranes
cranks cranks
cranky cranky
cranny cranny
crapes
crappy
crated crated
crater crater
crates crates
@ -1585,7 +1559,6 @@ crazes
creaks creaks
creaky creaky
creams creams
creamy
crease crease
create create
creche creche
@ -1594,8 +1567,6 @@ credos
creeds creeds
creeks creeks
creels creels
creeps
creepy
cremes cremes
creole creole
crepes crepes
@ -1728,9 +1699,6 @@ dainty
daises daises
damage damage
damask damask
dammed
dammit
damned
damped damped
dampen dampen
damper damper
@ -1754,7 +1722,6 @@ darers
daring daring
darken darken
darker darker
darkie
darkly darkly
darned darned
darner darner
@ -1763,8 +1730,6 @@ darter
dashed dashed
dasher dasher
dashes dashes
daters
dating
dative dative
daubed daubed
dauber dauber
@ -1921,7 +1886,6 @@ dharma
dhotis dhotis
diadem diadem
dialog dialog
diaper
diatom diatom
dibble dibble
dicier dicier
@ -1943,7 +1907,6 @@ digits
diking diking
diktat diktat
dilate dilate
dildos
dilute dilute
dimity dimity
dimmed dimmed
@ -2058,7 +2021,6 @@ dotted
double double
doubly doubly
doubts doubts
douche
doughy doughy
dourer dourer
dourly dourly
@ -2139,15 +2101,6 @@ duenna
duffed duffed
duffer duffer
dugout dugout
dulcet
dulled
duller
dumber
dumbly
dumbos
dumdum
dumped
dumper
dunces dunces
dunged dunged
dunked dunked
@ -2285,7 +2238,6 @@ endows
endued endued
endues endues
endure endure
enemas
energy energy
enfold enfold
engage engage
@ -2333,7 +2285,6 @@ erects
ermine ermine
eroded eroded
erodes erodes
erotic
errand errand
errant errant
errata errata
@ -2344,7 +2295,6 @@ eructs
erupts erupts
escape escape
eschew eschew
escort
escrow escrow
escudo escudo
espied espied
@ -2363,7 +2313,6 @@ ethnic
etudes etudes
euchre euchre
eulogy eulogy
eunuch
eureka eureka
evaded evaded
evader evader
@ -2392,7 +2341,6 @@ exempt
exerts exerts
exeunt exeunt
exhale exhale
exhort
exhume exhume
exiled exiled
exiles exiles
@ -2415,7 +2363,6 @@ extant
extend extend
extent extent
extols extols
extort
extras extras
exuded exuded
exudes exudes
@ -2440,7 +2387,6 @@ faeces
faerie faerie
faffed faffed
fagged fagged
faggot
failed failed
faille faille
fainer fainer
@ -2473,18 +2419,10 @@ faring
farmed farmed
farmer farmer
farrow farrow
farted
fascia fascia
fasted fasted
fasten fasten
faster faster
father
fathom
fating
fatsos
fatten
fatter
fatwas
faucet faucet
faults faults
faulty faulty
@ -2532,7 +2470,6 @@ fesses
festal festal
fester fester
feting feting
fetish
fetter fetter
fettle fettle
feudal feudal
@ -2617,9 +2554,7 @@ flaked
flakes flakes
flambe flambe
flamed flamed
flamer
flames flames
flange
flanks flanks
flared flared
flares flares
@ -2754,8 +2689,6 @@ franks
frappe frappe
frauds frauds
frayed frayed
freaks
freaky
freely freely
freest freest
freeze freeze
@ -2795,8 +2728,6 @@ fryers
frying frying
ftpers ftpers
ftping ftping
fucked
fucker
fuddle fuddle
fudged fudged
fudges fudges
@ -2891,10 +2822,7 @@ gasbag
gashed gashed
gashes gashes
gasket gasket
gasman
gasmen
gasped gasped
gassed
gasses gasses
gateau gateau
gather gather
@ -3104,7 +3032,6 @@ grimed
grimes grimes
grimly grimly
grinds grinds
gringo
griped griped
griper griper
gripes gripes
@ -3186,8 +3113,6 @@ gypsum
gyrate gyrate
gyving gyving
habits habits
hacked
hacker
hackle hackle
hadith hadith
haggis haggis
@ -3195,8 +3120,6 @@ haggle
hailed hailed
hairdo hairdo
haired haired
hajjes
hajjis
halest halest
haling haling
halite halite
@ -3223,11 +3146,8 @@ happen
haptic haptic
harass harass
harden harden
harder
hardly
harems harems
haring haring
harked
harlot harlot
harmed harmed
harped harped
@ -3407,7 +3327,6 @@ hoofed
hoofer hoofer
hookah hookah
hooked hooked
hooker
hookup hookup
hooped hooped
hoopla hoopla
@ -3459,8 +3378,6 @@ huffed
hugely hugely
hugest hugest
hugged hugged
hulled
huller
humane humane
humans humans
humble humble
@ -3667,8 +3584,6 @@ jacket
jading jading
jagged jagged
jaguar jaguar
jailed
jailer
jalopy jalopy
jammed jammed
jangle jangle
@ -3689,8 +3604,6 @@ jejune
jelled jelled
jellos jellos
jennet jennet
jerked
jerkin
jersey jersey
jested jested
jester jester
@ -3814,11 +3727,7 @@ kidded
kidder kidder
kiddie kiddie
kiddos kiddos
kidnap
kidney kidney
killed
killer
kilned
kilted kilted
kilter kilter
kimono kimono
@ -3827,15 +3736,11 @@ kinder
kindle kindle
kindly kindly
kingly kingly
kinked
kiosks kiosks
kipped kipped
kipper kipper
kirsch kirsch
kismet kismet
kissed
kisser
kisses
kiting kiting
kitsch kitsch
kitted kitted
@ -3847,10 +3752,6 @@ kluges
klutzy klutzy
knacks knacks
knaves knaves
kneads
kneels
knells
knifed
knifes knifes
knight knight
knives knives
@ -4210,8 +4111,6 @@ lunges
lupine lupine
lupins lupins
luring luring
lurked
lurker
lusher lusher
lushes lushes
lushly lushly
@ -4608,7 +4507,6 @@ muggle
mukluk mukluk
mulcts mulcts
mulish mulish
mullah
mulled mulled
mullet mullet
mumble mumble
@ -4721,9 +4619,6 @@ nickel
nicker nicker
nickle nickle
nieces nieces
niggas
niggaz
nigger
niggle niggle
nigher nigher
nights nights
@ -4736,7 +4631,6 @@ ninjas
ninths ninths
nipped nipped
nipper nipper
nipple
nitric nitric
nitwit nitwit
nixing nixing
@ -4781,15 +4675,6 @@ nozzle
nuance nuance
nubbin nubbin
nubile nubile
nuclei
nudest
nudged
nudges
nudism
nudist
nudity
nugget
nuking
numbed numbed
number number
numbly numbly
@ -4804,7 +4689,6 @@ nutter
nuzzle nuzzle
nybble nybble
nylons nylons
nympho
nymphs nymphs
oafish oafish
oaring oaring
@ -4885,7 +4769,6 @@ opting
option option
opuses opuses
oracle oracle
orally
orange orange
orated orated
orates orates
@ -4897,7 +4780,6 @@ ordeal
orders orders
ordure ordure
organs organs
orgasm
orgies orgies
oriels oriels
orient orient
@ -4993,10 +4875,6 @@ pander
panels panels
panics panics
panned panned
panted
pantie
pantos
pantry
papacy papacy
papaya papaya
papers papers
@ -5078,7 +4956,6 @@ pebble
pebbly pebbly
pecans pecans
pecked pecked
pecker
pectic pectic
pectin pectin
pedalo pedalo
@ -5151,9 +5028,6 @@ phenom
phials phials
phlegm phlegm
phloem phloem
phobia
phobic
phoebe
phoned phoned
phones phones
phoney phoney
@ -5228,9 +5102,6 @@ piques
piracy piracy
pirate pirate
pirogi pirogi
pissed
pisser
pisses
pistes pistes
pistil pistil
pistol pistol
@ -5311,8 +5182,6 @@ pogrom
points points
pointy pointy
poised poised
poises
poison
pokers pokers
pokeys pokeys
pokier pokier
@ -5422,7 +5291,6 @@ preyed
priced priced
prices prices
pricey pricey
pricks
prided prided
prides prides
priers priers
@ -5602,14 +5470,9 @@ rabbit
rabble rabble
rabies rabies
raceme raceme
racers
racial
racier racier
racily racily
racing racing
racism
racist
racked
racket racket
radars radars
radial radial
@ -5661,8 +5524,6 @@ rapers
rapids rapids
rapier rapier
rapine rapine
raping
rapist
rapped rapped
rappel rappel
rapper rapper
@ -5747,7 +5608,6 @@ recoup
rectal rectal
rector rector
rectos rectos
rectum
recurs recurs
recuse recuse
redact redact
@ -5891,7 +5751,6 @@ resume
retail retail
retain retain
retake retake
retard
retell retell
retest retest
retied retied
@ -6125,8 +5984,6 @@ sadden
sadder sadder
saddle saddle
sadhus sadhus
sadism
sadist
safari safari
safely safely
safest safest
@ -6364,16 +6221,6 @@ severs
sewage sewage
sewers sewers
sewing sewing
sexier
sexily
sexing
sexism
sexist
sexpot
sextet
sexton
sexual
shabby
shacks shacks
shaded shaded
shades shades
@ -6383,10 +6230,7 @@ shaggy
shaken shaken
shaker shaker
shakes shakes
shalom
shaman shaman
shamed
shames
shandy shandy
shanks shanks
shanty shanty
@ -6432,7 +6276,6 @@ shirks
shirrs shirrs
shirts shirts
shirty shirty
shitty
shiver shiver
shoals shoals
shoats shoats
@ -6575,9 +6418,6 @@ slangy
slants slants
slated slated
slates slates
slaved
slaver
slaves
slayed slayed
slayer slayer
sleaze sleaze
@ -6672,7 +6512,6 @@ snarks
snarky snarky
snarls snarls
snarly snarly
snatch
snazzy snazzy
sneaks sneaks
sneaky sneaky
@ -6716,7 +6555,6 @@ socket
sodded sodded
sodden sodden
sodium sodium
sodomy
soever soever
soften soften
softer softer
@ -7468,7 +7306,6 @@ torrid
torsos torsos
tortes tortes
tossed tossed
tosser
tosses tosses
tossup tossup
totals totals
@ -7686,7 +7523,6 @@ unhook
unhurt unhurt
unions unions
unique unique
unisex
unison unison
united united
unites unites
@ -7793,7 +7629,6 @@ vacant
vacate vacate
vacuum vacuum
vagary vagary
vagina
vaguer vaguer
vainer vainer
vainly vainly
@ -7930,9 +7765,6 @@ votive
vowels vowels
vowing vowing
voyage voyage
voyeur
vulgar
vulvae
wabbit wabbit
wacker wacker
wackos wackos
@ -7975,7 +7807,6 @@ wander
wangle wangle
waning waning
wanked wanked
wanker
wanner wanner
wanted wanted
wanton wanton


@ -89,7 +89,6 @@ aghast
agile agile
agility agility
aging aging
agnostic
agonize agonize
agonizing agonizing
agony agony
@ -375,8 +374,6 @@ augmented
august august
authentic authentic
author author
autism
autistic
autograph autograph
automaker automaker
automated automated
@ -446,7 +443,6 @@ backyard
bacon bacon
bacteria bacteria
bacterium bacterium
badass
badge badge
badland badland
badly badly
@ -1106,7 +1102,6 @@ clinic
clinking clinking
clip clip
clique clique
cloak
clobber clobber
clock clock
clone clone
@ -1776,7 +1771,6 @@ diagnosis
diagram diagram
dial dial
diameter diameter
diaper
diaphragm diaphragm
diary diary
dice dice
@ -1950,7 +1944,6 @@ dosage
dose dose
dotted dotted
doubling doubling
douche
dove dove
down down
dowry dowry
@ -2032,9 +2025,6 @@ duffel
dugout dugout
duh duh
duke duke
duller
dullness
duly
dumping dumping
dumpling dumpling
dumpster dumpster
@ -2527,8 +2517,6 @@ feisty
feline feline
felt-tip felt-tip
feminine feminine
feminism
feminist
feminize feminize
femur femur
fence fence
@ -2667,7 +2655,6 @@ fondness
fondue fondue
font font
food food
fool
footage footage
football football
footbath footbath
@ -2777,7 +2764,6 @@ gag
gainfully gainfully
gaining gaining
gains gains
gala
gallantly gallantly
galleria galleria
gallery gallery
@ -3028,7 +3014,6 @@ groom
groove groove
grooving grooving
groovy groovy
grope
ground ground
grouped grouped
grout grout
@ -3148,7 +3133,6 @@ happiness
happy happy
harbor harbor
hardcopy hardcopy
hardcore
hardcover hardcover
harddisk harddisk
hardened hardened
@ -3164,8 +3148,6 @@ hardware
hardwired hardwired
hardwood hardwood
hardy hardy
harmful
harmless
harmonica harmonica
harmonics harmonics
harmonize harmonize
@ -3340,7 +3322,6 @@ identical
identify identify
identity identity
ideology ideology
idiocy
idiom idiom
idly idly
igloo igloo
@ -3357,7 +3338,6 @@ imaging
imbecile imbecile
imitate imitate
imitation imitation
immature
immerse immerse
immersion immersion
imminent imminent
@ -3387,14 +3367,10 @@ implode
implosion implosion
implosive implosive
imply imply
impolite
important important
importer importer
impose impose
imposing imposing
impotence
impotency
impotent
impound impound
imprecise imprecise
imprint imprint
@ -3424,8 +3400,6 @@ irritable
irritably irritably
irritant irritant
irritate irritate
islamic
islamist
isolated isolated
isolating isolating
isolation isolation
@ -3524,7 +3498,6 @@ june
junior junior
juniper juniper
junkie junkie
junkman
junkyard junkyard
jurist jurist
juror juror
@ -3570,9 +3543,6 @@ king
kinship kinship
kinsman kinsman
kinswoman kinswoman
kissable
kisser
kissing
kitchen kitchen
kite kite
kitten kitten
@ -3649,7 +3619,6 @@ laundry
laurel laurel
lavender lavender
lavish lavish
laxative
lazily lazily
laziness laziness
lazy lazy
@ -3690,7 +3659,6 @@ liable
liberty liberty
librarian librarian
library library
licking
licorice licorice
lid lid
life life
@ -3741,8 +3709,6 @@ livestock
lividly lividly
living living
lizard lizard
lubricant
lubricate
lucid lucid
luckily luckily
luckiness luckiness
@ -3878,7 +3844,6 @@ marshland
marshy marshy
marsupial marsupial
marvelous marvelous
marxism
mascot mascot
masculine masculine
mashed mashed
@ -3914,8 +3879,6 @@ maximum
maybe maybe
mayday mayday
mayflower mayflower
moaner
moaning
mobile mobile
mobility mobility
mobilize mobilize
@ -4124,7 +4087,6 @@ nemeses
nemesis nemesis
neon neon
nephew nephew
nerd
nervous nervous
nervy nervy
nest nest
@ -4139,7 +4101,6 @@ never
next next
nibble nibble
nickname nickname
nicotine
niece niece
nifty nifty
nimble nimble
@ -4167,14 +4128,10 @@ nuptials
nursery nursery
nursing nursing
nurture nurture
nutcase
nutlike nutlike
nutmeg nutmeg
nutrient nutrient
nutshell nutshell
nuttiness
nutty
nuzzle
nylon nylon
oaf oaf
oak oak
@ -4205,7 +4162,6 @@ obstinate
obstruct obstruct
obtain obtain
obtrusive obtrusive
obtuse
obvious obvious
occultist occultist
occupancy occupancy
@ -4446,7 +4402,6 @@ palpitate
paltry paltry
pampered pampered
pamperer pamperer
pampers
pamphlet pamphlet
panama panama
pancake pancake
@ -4651,7 +4606,6 @@ plated
platform platform
plating plating
platinum platinum
platonic
platter platter
platypus platypus
plausible plausible
@ -4777,8 +4731,6 @@ prancing
pranker pranker
prankish prankish
prankster prankster
prayer
praying
preacher preacher
preaching preaching
preachy preachy
@ -4796,8 +4748,6 @@ prefix
preflight preflight
preformed preformed
pregame pregame
pregnancy
pregnant
preheated preheated
prelaunch prelaunch
prelaw prelaw
@ -4937,7 +4887,6 @@ prudishly
prune prune
pruning pruning
pry pry
psychic
public public
publisher publisher
pucker pucker
@ -4957,8 +4906,7 @@ punctual
punctuate punctuate
punctured punctured
pungent pungent
punisher punisher
punk
pupil pupil
puppet puppet
puppy puppy
@ -5040,7 +4988,6 @@ quote
rabid rabid
race race
racing racing
racism
rack rack
racoon racoon
radar radar
@ -5155,7 +5102,6 @@ recount
recoup recoup
recovery recovery
recreate recreate
rectal
rectangle rectangle
rectified rectified
rectify rectify
@ -5622,7 +5568,6 @@ sarcastic
sardine sardine
sash sash
sasquatch sasquatch
sassy
satchel satchel
satiable satiable
satin satin
@ -5651,7 +5596,6 @@ scaling
scallion scallion
scallop scallop
scalping scalping
scam
scandal scandal
scanner scanner
scanning scanning
@ -5928,8 +5872,6 @@ silent
silica silica
silicon silicon
silk silk
silliness
silly
silo silo
silt silt
silver silver
@ -5991,7 +5933,6 @@ skimmer
skimming skimming
skimpily skimpily
skincare skincare
skinhead
skinless skinless
skinning skinning
skinny skinny
@ -6197,7 +6138,6 @@ splinter
splotchy splotchy
splurge splurge
spoilage spoilage
spoiled
spoiler spoiler
spoiling spoiling
spoils spoils
@ -6610,7 +6550,6 @@ swimmer
swimming swimming
swimsuit swimsuit
swimwear swimwear
swinger
swinging swinging
swipe swipe
swirl swirl
@ -7079,7 +7018,6 @@ undocked
undoing undoing
undone undone
undrafted undrafted
undress
undrilled undrilled
undusted undusted
undying undying
@ -7522,9 +7460,7 @@ villain
vindicate vindicate
vineyard vineyard
vintage vintage
violate
violation violation
violator
violet violet
violin violin
viper viper

File diff suppressed because it is too large


@ -0,0 +1,33 @@
"""empty message
Revision ID: 0a5701a4f5e4
Revises: 01827104004b
Create Date: 2023-09-07 15:28:10.122756
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0a5701a4f5e4'
down_revision = '01827104004b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('delete_on', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True))
with op.get_context().autocommit_block():
op.create_index('ix_users_delete_on', 'users', ['delete_on'], unique=False, postgresql_concurrently=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index('ix_users_delete_on', table_name='users', postgresql_concurrently=True)
op.drop_column('users', 'delete_on')
# ### end Alembic commands ###
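Postgres cannot run CREATE INDEX CONCURRENTLY inside a transaction, which is why the index statements above sit in autocommit_block(). A rough equivalent outside Alembic (the connection URL is a placeholder):

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql://localhost/simplelogin")
    with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
        conn.execute(text(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_users_delete_on ON users (delete_on)"
        ))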


@ -0,0 +1,34 @@
"""empty message
Revision ID: ec7fdde8da9f
Revises: 0a5701a4f5e4
Create Date: 2023-09-28 18:09:48.016620
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "ec7fdde8da9f"
down_revision = "0a5701a4f5e4"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.create_index(
"ix_email_log_created_at", "email_log", ["created_at"], unique=False
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index("ix_email_log_created_at", table_name="email_log")
# ### end Alembic commands ###


@ -0,0 +1,39 @@
"""empty message
Revision ID: 46ecb648a47e
Revises: ec7fdde8da9f
Create Date: 2023-10-05 10:43:35.668902
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "46ecb648a47e"
down_revision = "ec7fdde8da9f"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.create_index(
op.f("ix_message_id_matching_email_log_id"),
"message_id_matching",
["email_log_id"],
unique=False,
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index(
op.f("ix_message_id_matching_email_log_id"),
table_name="message_id_matching",
)
# ### end Alembic commands ###


@ -0,0 +1,31 @@
"""empty message
Revision ID: 4bc54632d9aa
Revises: 46ecb648a47e
Create Date: 2023-11-07 14:02:17.610226
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4bc54632d9aa'
down_revision = '46ecb648a47e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_newsletter_subject', table_name='newsletter')
op.create_index(op.f('ix_newsletter_subject'), 'newsletter', ['subject'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_newsletter_subject'), table_name='newsletter')
op.create_index('ix_newsletter_subject', 'newsletter', ['subject'], unique=True)
# ### end Alembic commands ###


@ -0,0 +1,29 @@
"""empty message
Revision ID: 818b0a956205
Revises: 4bc54632d9aa
Create Date: 2024-02-01 10:43:46.253184
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818b0a956205'
down_revision = '4bc54632d9aa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('alias', 'last_email_log_id')
# ### end Alembic commands ###


@ -0,0 +1,44 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Backfill alias las use"
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 1000
el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
for row in rows:
Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
Session.commit()
updated += 1
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")


@ -0,0 +1,53 @@
#!/usr/bin/env python3
import argparse
import time
from app import config
from app.email_utils import generate_reply_email
from app.email_validation import is_valid_email
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
el_query = "SELECT id, alias_id, website_email from contact where id>=:last_id AND reply_email=:reply_email ORDER BY id ASC LIMIT :step"
update_query = "UPDATE contact SET reply_email=:reply_email WHERE id=:contact_id "
updated = 0
start_time = time.time()
step = 100
last_id = 0
print(f"Replacing contacts with reply_email={config.NOREPLY}")
while True:
rows = Session.execute(
el_query, {"last_id": last_id, "reply_email": config.NOREPLY, "step": step}
)
loop_updated = 0
for row in rows:
contact_id = row[0]
alias_id = row[1]
last_id = contact_id
website_email = row[2]
contact_email_for_reply = website_email if is_valid_email(website_email) else ""
alias = Alias.get(alias_id)
if alias is None:
print(f"CANNOT find alias {alias_id} in database for contact {contact_id}")
reply_email = generate_reply_email(contact_email_for_reply, alias)
print(
f"Replacing contact {contact_id} with {website_email} reply_email for {reply_email}"
)
Session.execute(
update_query, {"contact_id": row[0], "reply_email": reply_email}
)
Session.commit()
updated += 1
loop_updated += 1
elapsed = time.time() - start_time
print(f"\rContact {last_id} done")
if loop_updated == 0:
break
print("")

app/poetry.lock generated

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. # This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
[[package]] [[package]]
name = "aiohttp" name = "aiohttp"
@ -99,12 +99,10 @@ files = [
[package.dependencies] [package.dependencies]
aiosignal = ">=1.1.2" aiosignal = ">=1.1.2"
async-timeout = ">=4.0.0a3,<5.0" async-timeout = ">=4.0.0a3,<5.0"
asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""}
attrs = ">=17.3.0" attrs = ">=17.3.0"
charset-normalizer = ">=2.0,<4.0" charset-normalizer = ">=2.0,<4.0"
frozenlist = ">=1.1.1" frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0" multidict = ">=4.5,<7.0"
typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
yarl = ">=1.0,<2.0" yarl = ">=1.0,<2.0"
[package.extras] [package.extras]
@ -138,7 +136,6 @@ files = [
[package.dependencies] [package.dependencies]
atpublic = "*" atpublic = "*"
attrs = "*" attrs = "*"
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
[[package]] [[package]]
name = "aiosmtplib" name = "aiosmtplib"
@ -157,17 +154,20 @@ uvloop = ["uvloop (>=0.13,<0.15)"]
[[package]] [[package]]
name = "aiospamc" name = "aiospamc"
version = "0.6.1" version = "0.10.0"
description = "An asyncio-based library to communicate with SpamAssassin's SPAMD service." description = "An asyncio-based library to communicate with SpamAssassin's SPAMD service."
optional = false optional = false
python-versions = ">=3.5,<4.0" python-versions = ">=3.8,<4.0"
files = [ files = [
{file = "aiospamc-0.6.1-py3-none-any.whl", hash = "sha256:63b7d213d6af01058b855ddcde2147485ea4e685d6d13ee682ad12cb1fa87ca6"}, {file = "aiospamc-0.10.0-py3-none-any.whl", hash = "sha256:53381adc53814647608ec864263eb975cf9bf04370f16adc2e1c1fa7aca2f538"},
{file = "aiospamc-0.6.1.tar.gz", hash = "sha256:4923bf3d1bf5a07151a3a9ea8be7862d9dcdef37a858035668ad1c726b7b98c1"}, {file = "aiospamc-0.10.0.tar.gz", hash = "sha256:a31abdbd809c7f74352e03166ec98685677a97ed8d1cbbbd6e1274cb8919c0d4"},
] ]
[package.dependencies] [package.dependencies]
certifi = ">=2019.9,<2020.0" certifi = "*"
loguru = ">=0.7.0,<0.8.0"
typer = ">=0.9.0,<0.10.0"
typing-extensions = ">=4.6.2,<5.0.0"
[[package]] [[package]]
name = "alembic" name = "alembic"
@ -225,8 +225,6 @@ files = [
[package.dependencies] [package.dependencies]
lazy-object-proxy = ">=1.4.0" lazy-object-proxy = ">=1.4.0"
setuptools = ">=20.0" setuptools = ">=20.0"
typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
wrapt = ">=1.11,<2" wrapt = ">=1.11,<2"
[[package]] [[package]]
@ -240,20 +238,6 @@ files = [
{file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
] ]
[package.dependencies]
typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
[[package]]
name = "asynctest"
version = "0.13.0"
description = "Enhance the standard unittest package with features for testing asyncio libraries"
optional = false
python-versions = ">=3.5"
files = [
{file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
{file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
]
[[package]] [[package]]
name = "atpublic" name = "atpublic"
version = "2.0" version = "2.0"
@ -264,9 +248,6 @@ files = [
{file = "atpublic-2.0.tar.gz", hash = "sha256:ebeb62b71a5c683a84c1b16bbf415708af5a46841b142b85ac3a22ec2d7613b0"}, {file = "atpublic-2.0.tar.gz", hash = "sha256:ebeb62b71a5c683a84c1b16bbf415708af5a46841b142b85ac3a22ec2d7613b0"},
] ]
[package.dependencies]
typing_extensions = {version = "*", markers = "python_version < \"3.8\""}
[[package]] [[package]]
name = "attrs" name = "attrs"
version = "20.2.0" version = "20.2.0"
@ -306,9 +287,6 @@ files = [
{file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"}, {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"},
] ]
[package.dependencies]
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[package.extras] [package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] testing = ["pytest", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
@ -378,8 +356,6 @@ mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0" pathspec = ">=0.9.0"
platformdirs = ">=2" platformdirs = ">=2"
tomli = ">=1.1.0" tomli = ">=1.1.0"
typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
[package.extras] [package.extras]
colorama = ["colorama (>=0.4.3)"] colorama = ["colorama (>=0.4.3)"]
@ -562,7 +538,6 @@ files = [
[package.dependencies] [package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""} colorama = {version = "*", markers = "platform_system == \"Windows\""}
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[[package]] [[package]]
name = "coinbase-commerce" name = "coinbase-commerce"
@ -1192,12 +1167,12 @@ files = [
[[package]] [[package]]
name = "future" name = "future"
version = "0.18.2" version = "0.18.3"
description = "Clean single-source support for Python 3 and 2" description = "Clean single-source support for Python 3 and 2"
optional = false optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
files = [ files = [
{file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"},
] ]
[[package]] [[package]]
@ -1383,6 +1358,7 @@ files = [
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@ -1391,6 +1367,7 @@ files = [
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@ -1420,6 +1397,7 @@ files = [
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@ -1428,6 +1406,7 @@ files = [
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@ -1488,15 +1467,18 @@ files = [
[[package]] [[package]]
name = "httplib2" name = "httplib2"
version = "0.18.1" version = "0.22.0"
description = "A comprehensive HTTP client library." description = "A comprehensive HTTP client library."
optional = false optional = false
python-versions = "*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [ files = [
{file = "httplib2-0.18.1-py3-none-any.whl", hash = "sha256:ca2914b015b6247791c4866782fa6042f495b94401a0f0bd3e1d6e0ba2236782"}, {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"},
{file = "httplib2-0.18.1.tar.gz", hash = "sha256:8af66c1c52c7ffe1aa5dc4bcd7c769885254b0756e6e69f953c7f0ab49a70ba3"}, {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"},
] ]
[package.dependencies]
pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""}
[[package]] [[package]]
name = "humanfriendly" name = "humanfriendly"
version = "8.2" version = "8.2"
@ -1548,7 +1530,6 @@ files = [
] ]
[package.dependencies] [package.dependencies]
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5" zipp = ">=0.5"
[package.extras] [package.extras]
@ -1762,15 +1743,33 @@ files = [
[package.dependencies] [package.dependencies]
six = ">=1.4.1" six = ">=1.4.1"
[[package]]
name = "loguru"
version = "0.7.2"
description = "Python logging made (stupidly) simple"
optional = false
python-versions = ">=3.5"
files = [
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
]
[package.dependencies]
colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
[package.extras]
dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
[[package]] [[package]]
name = "mako" name = "mako"
version = "1.1.3" version = "1.2.4"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages." description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" python-versions = ">=3.7"
files = [ files = [
{file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"}, {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"},
{file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"}, {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"},
] ]
[package.dependencies] [package.dependencies]
@ -1779,6 +1778,7 @@ MarkupSafe = ">=0.9.2"
[package.extras] [package.extras]
babel = ["Babel"] babel = ["Babel"]
lingua = ["lingua"] lingua = ["lingua"]
testing = ["pytest"]
[[package]] [[package]]
name = "markupsafe" name = "markupsafe"
@ -2101,9 +2101,6 @@ files = [
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
] ]
[package.dependencies]
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
[package.extras] [package.extras]
dev = ["pre-commit", "tox"] dev = ["pre-commit", "tox"]
@ -2132,7 +2129,6 @@ files = [
[package.dependencies] [package.dependencies]
cfgv = ">=2.0.0" cfgv = ">=2.0.0"
identify = ">=1.0.0" identify = ">=1.0.0"
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
nodeenv = ">=0.11.1" nodeenv = ">=0.11.1"
pyyaml = ">=5.1" pyyaml = ">=5.1"
toml = "*" toml = "*"
@ -2154,36 +2150,26 @@ wcwidth = "*"
[[package]] [[package]]
name = "protobuf" name = "protobuf"
version = "3.15.0" version = "4.24.3"
description = "Protocol Buffers" description = ""
optional = false optional = false
python-versions = "*" python-versions = ">=3.7"
files = [ files = [
{file = "protobuf-3.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:44d824adb48fe8baf81e628c2edaf9911912cd592a83621d2b877ccfde631d61"}, {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
{file = "protobuf-3.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:b04449133e31b65924650d758efbc2397c2d0e5eb3c8cae7428ffc4fa9c3403d"}, {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
{file = "protobuf-3.15.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:ef69a10d45529a08367e70e736b3ce8e2af51360f23650ef1d4381ff9038467a"}, {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
{file = "protobuf-3.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:50f28efa66232a2fbbdd638dd61d9399ff66bcfde40ff305263b229692928081"}, {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
{file = "protobuf-3.15.0-cp35-cp35m-win32.whl", hash = "sha256:25f0ee57684f7bc3f0511b73cf55c016a891d09079c357794759663fe3da9cd3"}, {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
{file = "protobuf-3.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:94b34486986d7683e83f9d02a0112533263fc20fae54fff3f4fd69451e682ec7"}, {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
{file = "protobuf-3.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:11f192d491613f692b3ddc18f06c925785b3019c8e35d32c811421ca9ff7d50e"}, {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
{file = "protobuf-3.15.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:867635c1d541ce336a1a4df3379d1116f02eba6dc326d080c8ef02f34036c415"}, {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
{file = "protobuf-3.15.0-cp36-cp36m-win32.whl", hash = "sha256:f6d10b1f86cebb8008a256f474948fc6204391e02a9c12935eebf036bbb07b65"}, {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
{file = "protobuf-3.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5c2ee13f5ea237a17bd81f52f972b7d334c0a43330d2a2a7b25b07f16eb146d8"}, {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
{file = "protobuf-3.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2ccc0169b5145b3af676b6997be6fe62961edfc12bb524a7b9c46fb5d208a3d4"}, {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
{file = "protobuf-3.15.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:51e080fb1de5db54b0a6b1519ba8dda55e57404b0a4948e58f1342a3e15d89ec"}, {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
{file = "protobuf-3.15.0-cp37-cp37m-win32.whl", hash = "sha256:d892e487bd544463ce1e656434591593f710169335ac3f02ce30ee866c2f2464"}, {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
{file = "protobuf-3.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:40f031f79b0254aa62082ca87776c0959d85adf99f09cdef9d0b320bb772a609"},
{file = "protobuf-3.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ae4bcd5a0ce3f77d0523c3e5ed0d04ed2af454f7bf7cef08cb7a8d0915ac80a9"},
{file = "protobuf-3.15.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:830a9c71df347b3fb3cd24ec985c4ed64f6e75983f543a1d8a3c96302dae915c"},
{file = "protobuf-3.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fecf1b00ccc87bb8debca8b56458cc57c486d2d7afe22c7526728f79ffe232f4"},
{file = "protobuf-3.15.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0e00b4e4a4800b389ae7f0058e1fc9d012444fdde926569d8cce55c84a01ef74"},
{file = "protobuf-3.15.0-py2.py3-none-any.whl", hash = "sha256:013a9ec4dccad9a6ed3aa1ad9e86a25a4e0d6d3bbe059b6f6502db20473c3e69"},
{file = "protobuf-3.15.0.tar.gz", hash = "sha256:e9f13fadb15b80e4a83ef5d9fa44e19243b1e2d96e84ee2228ca305180ca059e"},
] ]
[package.dependencies]
six = ">=1.9"
[[package]] [[package]]
name = "psutil" name = "psutil"
version = "5.7.2" version = "5.7.2"
@ -2414,7 +2400,6 @@ mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0" platformdirs = ">=2.2.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
tomlkit = ">=0.10.1" tomlkit = ">=0.10.1"
typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
[package.extras] [package.extras]
spelling = ["pyenchant (>=3.2,<4.0)"] spelling = ["pyenchant (>=3.2,<4.0)"]
@ -2525,7 +2510,6 @@ files = [
[package.dependencies] [package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""} colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
iniconfig = "*" iniconfig = "*"
packaging = "*" packaging = "*"
pluggy = ">=0.12,<2.0" pluggy = ">=0.12,<2.0"
@ -2665,19 +2649,17 @@ files = [
[[package]] [[package]]
name = "redis" name = "redis"
version = "4.5.3" version = "4.6.0"
description = "Python client for Redis database and key-value store" description = "Python client for Redis database and key-value store"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{file = "redis-4.5.3-py3-none-any.whl", hash = "sha256:7df17a0a2b72a4c8895b462dd07616c51b1dcb48fdd7ecb7b6f4bf39ecb2e94e"}, {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"},
{file = "redis-4.5.3.tar.gz", hash = "sha256:56732e156fe31801c4f43396bd3ca0c2a7f6f83d7936798531b9848d103381aa"}, {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"},
] ]
[package.dependencies] [package.dependencies]
async-timeout = {version = ">=4.0.2", markers = "python_version < \"3.11\""} async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""}
importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""}
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
[package.extras] [package.extras]
hiredis = ["hiredis (>=1.0.0)"] hiredis = ["hiredis (>=1.0.0)"]
@ -2768,24 +2750,24 @@ files = [
[[package]] [[package]]
name = "requests" name = "requests"
version = "2.25.1" version = "2.31.0"
description = "Python HTTP for Humans." description = "Python HTTP for Humans."
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" python-versions = ">=3.7"
files = [ files = [
{file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
] ]
[package.dependencies] [package.dependencies]
certifi = ">=2017.4.17" certifi = ">=2017.4.17"
chardet = ">=3.0.2,<5" charset-normalizer = ">=2,<4"
idna = ">=2.5,<3" idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27" urllib3 = ">=1.21.1,<3"
[package.extras] [package.extras]
security = ["cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7)"]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]] [[package]]
name = "requests-file" name = "requests-file"
@ -2845,51 +2827,34 @@ files = [
{file = "ruamel.yaml-0.16.12.tar.gz", hash = "sha256:076cc0bc34f1966d920a49f18b52b6ad559fbe656a0748e3535cf7b3f29ebf9e"}, {file = "ruamel.yaml-0.16.12.tar.gz", hash = "sha256:076cc0bc34f1966d920a49f18b52b6ad559fbe656a0748e3535cf7b3f29ebf9e"},
] ]
[package.dependencies]
"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.9\""}
[package.extras] [package.extras]
docs = ["ryd"] docs = ["ryd"]
jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
[[package]] [[package]]
name = "ruamel.yaml.clib" name = "ruff"
version = "0.2.2" version = "0.1.5"
description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false optional = false
python-versions = "*" python-versions = ">=3.7"
files = [ files = [
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"}, {file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"},
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1"}, {file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"},
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win32.whl", hash = "sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"},
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win_amd64.whl", hash = "sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"},
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"},
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"},
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"},
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"},
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win32.whl", hash = "sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f"}, {file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"},
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62"}, {file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"},
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c"}, {file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"},
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988"}, {file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"},
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3"}, {file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"},
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2"}, {file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"},
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91"}, {file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"},
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6"}, {file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"},
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e"}, {file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"},
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4"},
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win32.whl", hash = "sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6"},
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5"},
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0"},
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99"},
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923"},
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win32.whl", hash = "sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1"},
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b"},
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a"},
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5"},
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c"},
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win32.whl", hash = "sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd"},
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb"},
{file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"},
] ]
[[package]] [[package]]
@ -3106,15 +3071,20 @@ url = ["furl (>=0.4.1)"]
[[package]] [[package]]
name = "sqlparse" name = "sqlparse"
version = "0.4.2" version = "0.4.4"
description = "A non-validating SQL parser." description = "A non-validating SQL parser."
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=3.5"
files = [ files = [
{file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"}, {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
{file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"}, {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
] ]
[package.extras]
dev = ["build", "flake8"]
doc = ["sphinx"]
test = ["pytest", "pytest-cov"]
[[package]] [[package]]
name = "strictyaml" name = "strictyaml"
version = "1.1.0" version = "1.1.0"
@ -3249,47 +3219,35 @@ pytz = "*"
requests = ">=2.0.0" requests = ">=2.0.0"
[[package]] [[package]]
name = "typed-ast" name = "typer"
version = "1.5.2" version = "0.9.0"
description = "a fork of Python 2 and 3 ast modules with type comment support" description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
files = [ files = [
{file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
{file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
{file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"},
{file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"},
{file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"},
{file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"},
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"},
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"},
{file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"},
{file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"},
{file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"},
{file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"},
{file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"},
{file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"},
{file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"},
{file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"},
{file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"},
{file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"},
{file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"},
{file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"},
{file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"},
{file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"},
{file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"},
{file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"},
] ]
[package.dependencies]
click = ">=7.1.1,<9.0.0"
typing-extensions = ">=3.7.4.3"
[package.extras]
all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
version = "4.0.1" version = "4.8.0"
description = "Backported and Experimental Type Hints for Python 3.6+" description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.8"
files = [ files = [
{file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
{file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
] ]
[[package]] [[package]]
@ -3345,7 +3303,6 @@ files = [
"backports.entry-points-selectable" = ">=1.0.4" "backports.entry-points-selectable" = ">=1.0.4"
distlib = ">=0.3.1,<1" distlib = ">=0.3.1,<1"
filelock = ">=3.0.0,<4" filelock = ">=3.0.0,<4"
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
platformdirs = ">=2,<3" platformdirs = ">=2,<3"
six = ">=1.9.0,<2" six = ">=1.9.0,<2"
@ -3426,6 +3383,20 @@ files = [
dev = ["coverage", "pallets-sphinx-themes", "pytest", "pytest-timeout", "sphinx", "sphinx-issues", "tox"] dev = ["coverage", "pallets-sphinx-themes", "pytest", "pytest-timeout", "sphinx", "sphinx-issues", "tox"]
watchdog = ["watchdog"] watchdog = ["watchdog"]
[[package]]
name = "win32-setctime"
version = "1.1.0"
description = "A small Python utility to set file creation time on Windows"
optional = false
python-versions = ">=3.5"
files = [
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]
[package.extras]
dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
[[package]] [[package]]
name = "wrapt" name = "wrapt"
version = "1.15.0" version = "1.15.0"
@ -3635,7 +3606,6 @@ files = [
[package.dependencies] [package.dependencies]
idna = ">=2.0" idna = ">=2.0"
multidict = ">=4.0" multidict = ">=4.0"
typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
[[package]] [[package]]
name = "zipp" name = "zipp"
@ -3729,5 +3699,5 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.7.2" python-versions = "^3.10"
content-hash = "9cf184eded5a8fb41f7725ff5ed0f26ad5bbd44b9d59a9180abb4c6bf3fe278a" content-hash = "01afc410d21eeac0a0ac7e8ef6eeb0a991cf4bc091c3351049263462e205ff63"
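Most of the churn in this lock file follows from the python-versions bump just above: raising the floor from ^3.7.2 to ^3.10 makes every dependency guarded by a python_version < "3.8" (or < "3.10") environment marker unreachable, which is why backports such as typed-ast and asynctest and the conditional typing-extensions / importlib-metadata pins disappear from the resolved set. A small sketch of how such markers evaluate, using the packaging library purely for illustration (Poetry performs the equivalent check internally):

from packaging.markers import Marker

# Markers like the ones removed throughout this lock file can never match
# once the project requires Python >= 3.10.
old_guard = Marker('python_version < "3.8"')
print(old_guard.evaluate({"python_version": "3.7"}))   # True  -> backport needed on 3.7
print(old_guard.evaluate({"python_version": "3.10"}))  # False -> dependency dropped from the lock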

View File

@ -18,6 +18,10 @@ exclude = '''
) )
''' '''
[tool.ruff]
ignore-init-module-imports = true
exclude = [".venv", "migrations"]
[tool.djlint] [tool.djlint]
indent = 2 indent = 2
profile = "jinja" profile = "jinja"
@ -53,7 +57,7 @@ packages = [
include = ["templates/*", "templates/**/*", "local_data/*.txt"] include = ["templates/*", "templates/**/*", "local_data/*.txt"]
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.7.2" python = "^3.10"
flask = "^1.1.2" flask = "^1.1.2"
flask_login = "^0.5.0" flask_login = "^0.5.0"
wtforms = "^2.3.3" wtforms = "^2.3.3"
@ -96,7 +100,6 @@ pyspf = "^2.0.14"
Flask-Limiter = "^1.4" Flask-Limiter = "^1.4"
memory_profiler = "^0.57.0" memory_profiler = "^0.57.0"
gevent = "22.10.2" gevent = "22.10.2"
aiospamc = "^0.6.1"
email_validator = "^1.1.1" email_validator = "^1.1.1"
PGPy = "0.5.4" PGPy = "0.5.4"
coinbase-commerce = "^1.0.1" coinbase-commerce = "^1.0.1"
@ -112,6 +115,7 @@ cryptography = "37.0.1"
SQLAlchemy = "1.3.24" SQLAlchemy = "1.3.24"
redis = "^4.5.3" redis = "^4.5.3"
newrelic-telemetry-sdk = "^0.5.0" newrelic-telemetry-sdk = "^0.5.0"
aiospamc = "0.10"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
pytest = "^7.0.0" pytest = "^7.0.0"
@ -121,6 +125,9 @@ black = "^22.1.0"
djlint = "^1.3.0" djlint = "^1.3.0"
pylint = "^2.14.4" pylint = "^2.14.4"
[tool.poetry.group.dev.dependencies]
ruff = "^0.1.5"
[build-system] [build-system]
requires = ["poetry>=0.12"] requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api" build-backend = "poetry.masonry.api"

View File

@ -228,6 +228,8 @@ def load_user(alternative_id):
sentry_sdk.set_user({"email": user.email, "id": user.id}) sentry_sdk.set_user({"email": user.email, "id": user.id})
if user.disabled: if user.disabled:
return None return None
if not user.is_active():
return None
return user return user
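A minimal sketch of the Flask-Login loader this hunk extends: an inactive user now resolves to None, so the session is dropped on the next request, just like an explicitly disabled account. The User import, the get_by lookup and the is_active() helper are assumptions based on the visible lines, not the exact SimpleLogin code.

from flask_login import LoginManager

from app.models import User  # assumed import path

login_manager = LoginManager()

@login_manager.user_loader
def load_user(alternative_id: str):
    # Looked up by the alternative (session) id rather than the primary key.
    user = User.get_by(alternative_id=alternative_id)
    if not user:
        return None
    if user.disabled:
        return None  # explicitly disabled accounts never get a session
    if not user.is_active():  # the new guard added in this hunk
        return None
    return user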
@ -407,8 +409,10 @@ def jinja2_filter(app):
@app.context_processor @app.context_processor
def inject_stage_and_region(): def inject_stage_and_region():
now = arrow.now()
return dict( return dict(
YEAR=arrow.now().year, YEAR=now.year,
NOW=now,
URL=URL, URL=URL,
SENTRY_DSN=SENTRY_FRONT_END_DSN, SENTRY_DSN=SENTRY_FRONT_END_DSN,
VERSION=SHA1, VERSION=SHA1,
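Pulled out of the hunk above into a self-contained form: the context processor now calls arrow.now() once per request and exposes it as NOW alongside the existing YEAR value, and the Black Friday banners later in this diff rely on NOW.timestamp. Flask and arrow being available are the only assumptions here.

import arrow
from flask import Flask

app = Flask(__name__)

@app.context_processor
def inject_stage_and_region():
    now = arrow.now()  # single call per request, reused for both values
    return dict(
        YEAR=now.year,
        NOW=now,  # referenced as NOW.timestamp in templates added later in this diff
    )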
@ -641,7 +645,7 @@ def setup_paddle_callback(app: Flask):
@app.route("/paddle_coupon", methods=["GET", "POST"]) @app.route("/paddle_coupon", methods=["GET", "POST"])
def paddle_coupon(): def paddle_coupon():
LOG.d(f"paddle coupon callback %s", request.form) LOG.d("paddle coupon callback %s", request.form)
if not paddle_utils.verify_incoming_request(dict(request.form)): if not paddle_utils.verify_incoming_request(dict(request.form)):
LOG.e("request not coming from paddle. Request data:%s", dict(request.form)) LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
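One note on the logging change above: the dropped f prefix was a no-op — the string contains no {} placeholders, so the f-string evaluated to the same literal and the %s was still filled in lazily by the logger. Removing it is the kind of cleanup ruff reports as F541 (f-string without any placeholders). A standalone illustration with the standard logging module; SimpleLogin's LOG wrapper is assumed to behave the same way:

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("paddle")
form = {"coupon_id": "42"}

# Both lines emit identical output: the f prefix contributes nothing because
# there are no {...} placeholders, and %s is interpolated lazily by logging.
log.debug(f"paddle coupon callback %s", form)
log.debug("paddle coupon callback %s", form)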

View File

@ -1,13 +1,12 @@
from time import sleep
import flask_migrate import flask_migrate
from IPython import embed from IPython import embed
from sqlalchemy_utils import create_database, database_exists, drop_database from sqlalchemy_utils import create_database, database_exists, drop_database
from app import models from app import models
from app.config import DB_URI from app.config import DB_URI
from app.models import * from app.db import Session
from app.log import LOG
from app.models import User, RecoveryCode
if False: if False:
# noinspection PyUnreachableCode # noinspection PyUnreachableCode

Binary file not shown (image diff: 13 KiB before, 38 KiB after).

View File

@ -15,7 +15,7 @@
{{ otp_token_form.csrf_token }} {{ otp_token_form.csrf_token }}
<input type="hidden" name="form-name" value="create" /> <input type="hidden" name="form-name" value="create" />
<div class="font-weight-bold mt-5">Token</div> <div class="font-weight-bold mt-5">Token</div>
<div class="small-text mb-3">Please enter the 2FA code from your 2FA authenticator</div> <div class="small-text mb-3">Please enter the 2FA code from your authenticator app</div>
{{ otp_token_form.token(class="form-control", autofocus="true") }} {{ otp_token_form.token(class="form-control", autofocus="true") }}
{{ render_field_errors(otp_token_form.token) }} {{ render_field_errors(otp_token_form.token) }}
<div class="form-check"> <div class="form-check">

View File

@ -9,7 +9,7 @@
<h1 class="card-title">Create new account</h1> <h1 class="card-title">Create new account</h1>
<div class="form-group"> <div class="form-group">
<label class="form-label">Email address</label> <label class="form-label">Email address</label>
{{ form.email(class="form-control", type="email", placeholder="YourName@protonmail.com") }} {{ form.email(class="form-control", type="email", placeholder="username@proton.me") }}
<div class="small-text alert alert-info" style="margin-top: 1px"> <div class="small-text alert alert-info" style="margin-top: 1px">
Emails sent to your alias will be forwarded to this email address. Emails sent to your alias will be forwarded to this email address.
<br> <br>

View File

@ -7,6 +7,7 @@
<div class="card-body p-6 text-center"> <div class="card-body p-6 text-center">
<h1 class="h4">An email to validate your email is on its way.</h1> <h1 class="h4">An email to validate your email is on its way.</h1>
<p>Please check your inbox/spam folder.</p> <p>Please check your inbox/spam folder.</p>
<p>Make sure to mark the message as not spam so that future messages come to your normal inbox.</p>
</div> </div>
</div> </div>
{% endblock %} {% endblock %}

View File

@ -86,6 +86,12 @@
</head> </head>
<body> <body>
<div class="page"> <div class="page">
{% if NOW.timestamp < 1701475201 and current_user.is_authenticated and current_user.should_show_upgrade_button() %}
<div class="alert alert-success text-center mb-0" role="alert">
Black Friday: $20 for the first year instead of $30. Available until December 1st.
</div>
{% endif %}
{% block announcement %}{% endblock %} {% block announcement %}{% endblock %}
<div class="container"> <div class="container">
<!-- For flash messages --> <!-- For flash messages -->
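The cutoff in the new banner is a plain Unix timestamp, and NOW is the arrow object injected by the context processor earlier in this diff. A quick standard-library check confirms the number matches the "until December 1st" wording:

from datetime import datetime, timezone

cutoff = datetime.fromtimestamp(1701475201, tz=timezone.utc)
print(cutoff.isoformat())  # 2023-12-02T00:00:01+00:00 -> banner shown through Dec 1, 2023 (UTC)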

View File

@ -59,6 +59,8 @@
</div> </div>
</div> </div>
</div> </div>
{% if can_create_contacts %}
<div class="row mb-5"> <div class="row mb-5">
<div class="col-12 col-lg-6 pt-1"> <div class="col-12 col-lg-6 pt-1">
<form method="post"> <form method="post">
@ -79,6 +81,7 @@
{% endif %} {% endif %}
</form> </form>
</div> </div>
{% endif %}
<div class="col-12 col-lg-6 pt-1"> <div class="col-12 col-lg-6 pt-1">
<div class="float-right d-flex"> <div class="float-right d-flex">
<form method="post"> <form method="post">

View File

@ -17,7 +17,7 @@
<b>hello@{{ FIRST_ALIAS_DOMAIN }}</b>, <b>hello@{{ FIRST_ALIAS_DOMAIN }}</b>,
<b>me@{{ FIRST_ALIAS_DOMAIN }}</b>, etc. <b>me@{{ FIRST_ALIAS_DOMAIN }}</b>, etc.
<br /> <br />
If you add your own domain, this restriction is removed, and you can fully customize the alias. If you add your own domain (or subdomain), this restriction is removed, and you can fully customize the alias.
<br /> <br />
</div> </div>
</div> </div>
@ -93,6 +93,7 @@
</div> </div>
<div class="row"> <div class="row">
<div class="col p-1"> <div class="col p-1">
{{ csrf_form.csrf_token }}
<button type="submit" id="create" class="btn btn-primary mt-1">Create</button> <button type="submit" id="create" class="btn btn-primary mt-1">Create</button>
</div> </div>
</div> </div>

View File

@ -268,7 +268,7 @@
If you are using a subdomain, e.g. <i>subdomain.domain.com</i>, If you are using a subdomain, e.g. <i>subdomain.domain.com</i>,
you need to use <i>dkim._domainkey.subdomain</i> as the domain instead. you need to use <i>dkim._domainkey.subdomain</i> as the domain instead.
<br /> <br />
That means, if your domain is <i>mail.domain.com</i> you should enter <i>dkim._domainkey.mail.domain.com</i> as the Domain. That means, if your domain is <i>mail.domain.com</i> you should enter <i>dkim._domainkey.mail</i> as the Domain.
<br /> <br />
</div> </div>
<div class="alert alert-info"> <div class="alert alert-info">

View File

@ -12,7 +12,7 @@
<div class="card-body"> <div class="card-body">
<h1 class="h3">Two Factor Authentication - TOTP</h1> <h1 class="h3">Two Factor Authentication - TOTP</h1>
<p> <p>
You will need to use a 2FA application like Google Authenticator or Authy on your phone or PC and scan the following QR Code: You will need to use a 2FA application like Proton Pass or Aegis on your phone or PC and scan the following QR Code:
</p> </p>
<canvas id="qr"></canvas> <canvas id="qr"></canvas>
<script> <script>

View File

@ -10,7 +10,7 @@
<div>{{ notification.message | safe }}</div> <div>{{ notification.message | safe }}</div>
<form method="post" <form method="post"
class="float-right mt-3" class="float-right mt-3"
onsubmit="return confirm('This operation is not reversible, please confirm');"> onsubmit="return confirm('This operation is irreversible, please confirm');">
<button class="btn btn-outline-danger">Delete</button> <button class="btn btn-outline-danger">Delete</button>
</form> </form>
</div> </div>

View File

@ -57,6 +57,22 @@
{% endblock %} {% endblock %}
{% block default_content %} {% block default_content %}
{% if NOW.timestamp < 1701475201 %}
<div class="alert alert-info">
Black Friday Deal: 33% off on the yearly plan for the <b>first</b> year ($20 instead of $30).
<br>
Please use this coupon code
<em data-toggle="tooltip"
title="Click to copy"
class="clipboard"
data-clipboard-text="BF2023">BF2023</em> during the checkout.
<br>
<img src="/static/images/coupon.png" class="m-2" style="max-width: 300px">
<br>
Available until December 1, 2023.
</div>
{% endif %}
<div class="pb-8"> <div class="pb-8">
<div class="text-center mx-md-auto mb-8 mt-6"> <div class="text-center mx-md-auto mb-8 mt-6">
<h1>Upgrade to unlock premium features</h1> <h1>Upgrade to unlock premium features</h1>

View File

@ -18,7 +18,7 @@
<br /> <br />
For generic questions, i.e. not related to your account, we recommend to post the question on For generic questions, i.e. not related to your account, we recommend to post the question on
our our
<a href="https://www.reddit.com/r/Simplelogin/">Reddit</a> <a href="https://www.reddit.com/r/Simplelogin/">Reddit</a> or <a href="https://forum.simplelogin.io/">our official forum</a>
where our community can help answer the question where our community can help answer the question
and other people with the same question can find the answer there. and other people with the same question can find the answer there.
</div> </div>

View File

@ -1,17 +1,19 @@
{% extends "default.html" %} {% extends "default.html" %}
{% set active_page = "dashboard" %} {% set active_page = "dashboard" %}
{% block title %}Block an alias{% endblock %} {% block title %}Deactivate an alias{% endblock %}
{% block default_content %} {% block default_content %}
<div class="card"> <div class="card">
<div class="card-body"> <div class="card-body">
<h1 class="h3">Block alias</h1> <h1 class="h3">Deactivate alias</h1>
<p> <p>
You are about to block the alias You are about to deactivate the alias
<a href="mailto:{{ alias }}" target="_blank">{{ alias }}</a> <a href="mailto:{{ alias }}" target="_blank">{{ alias }}</a>
</p> </p>
<p>After this, you will stop receiving all emails sent to this alias, please confirm.</p> <p>
After this, you will stop receiving all emails sent to this alias, please confirm. You will always be able to re-activate it until you decide to delete it.
</p>
<form method="post"> <form method="post">
<button class="btn btn-warning">Confirm</button> <button class="btn btn-warning">Confirm</button>
</form> </form>

View File

@ -43,9 +43,8 @@ Note, if you are a paying Proton Mail user, you automatically receive the premiu
{% endcall %} {% endcall %}
{% call text() %} {% call text() %}
For any question, feedback or feature request, please join our For any question or feedback, please join our <a href="https://forum.simplelogin.io/">official forum</a>.
<a href="https://github.com/simple-login/app/discussions">GitHub forum</a> If you want to request a feature, please submit it on our <a href="https://github.com/simple-login/app/discussions">GitHub repo</a>.
.
You can also join our You can also join our
<a href="https://www.reddit.com/r/Simplelogin/">Reddit</a> <a href="https://www.reddit.com/r/Simplelogin/">Reddit</a>
or follow our or follow our

View File

@ -13,7 +13,8 @@ SimpleLogin is also available on Android and iOS so you can manage your aliases
Note, if you are a paying Proton Mail user, you automatically receive the premium version of SimpleLogin. Note, if you are a paying Proton Mail user, you automatically receive the premium version of SimpleLogin.
For any question, feedback or feature request, please join our GitHub forum. For any question or feedback, please join our official forum.
If you want to request a feature, please submit it on our GitHub repo.
You can also join our Reddit or follow our Twitter. You can also join our Reddit or follow our Twitter.
Best, Best,
@ -26,7 +27,8 @@ Firefox: https://addons.mozilla.org/firefox/addon/simplelogin/
Edge: https://microsoftedge.microsoft.com/addons/detail/simpleloginreceive-sen/diacfpipniklenphgljfkmhinphjlfff Edge: https://microsoftedge.microsoft.com/addons/detail/simpleloginreceive-sen/diacfpipniklenphgljfkmhinphjlfff
Android: https://play.google.com/store/apps/details?id=io.simplelogin.android Android: https://play.google.com/store/apps/details?id=io.simplelogin.android
iOS: https://apps.apple.com/app/id1494359858 iOS: https://apps.apple.com/app/id1494359858
Github forum: https://github.com/simple-login/app/discussions Github repo: https://github.com/simple-login/app/discussions
Official forum: https://forum.simplelogin.io/
Reddit: https://www.reddit.com/r/Simplelogin/ Reddit: https://www.reddit.com/r/Simplelogin/
Twitter: https://twitter.com/simple_login Twitter: https://twitter.com/simple_login

View File

@ -71,9 +71,10 @@ Please note that you can't create more than {{ MAX_NB_EMAIL_FREE_PLAN }} aliases
{% endif %} {% endif %}
{% call text() %} {% call text() %}
For any question, feedback or feature request, please join our For any question or feedback,
<a href="https://github.com/simple-login/app/discussions">GitHub forum</a> please join our <a href="https://forum.simplelogin.io/">official forum</a>.
. If you want to request a feature,
please submit it on our <a href="https://github.com/simple-login/app/discussions">GitHub repo</a>.
You can also join our You can also join our
<a href="https://www.reddit.com/r/Simplelogin/">Reddit</a> <a href="https://www.reddit.com/r/Simplelogin/">Reddit</a>
or follow our or follow our

View File

@ -26,6 +26,8 @@ No worries: all aliases you create during this period will continue to work norm
At any time, you can reach out to us by simply replying to this email. At any time, you can reach out to us by simply replying to this email.
For any question, feedback or feature request, please join our GitHub forum at https://github.com/simple-login/app/discussions For any question or feedback, please join our official forum at https://forum.simplelogin.io/
If you want to request a feature, please submit it on our GitHub repo at https://github.com/simple-login/app/discussions
You can also join our Reddit at https://www.reddit.com/r/Simplelogin/ follow our Twitter at https://twitter.com/simplelogin You can also join our Reddit at https://www.reddit.com/r/Simplelogin/ follow our Twitter at https://twitter.com/simplelogin

View File

@ -4,6 +4,7 @@
{{ render_text("Thank you for choosing SimpleLogin.") }} {{ render_text("Thank you for choosing SimpleLogin.") }}
{{ render_text("To get started, please confirm that <b>" + email + "</b> is your email address by clicking on the button below within 1 hour.") }} {{ render_text("To get started, please confirm that <b>" + email + "</b> is your email address by clicking on the button below within 1 hour.") }}
{{ render_text("If it wasn't you, maybe someone entered your email by mistake. In this case you can ignore this mail.") }}
{{ render_button("Verify email", activation_link) }} {{ render_button("Verify email", activation_link) }}
{{ render_text('Thanks, {{ render_text('Thanks,
<br /> <br />

View File

@ -4,4 +4,6 @@
Thank you for choosing SimpleLogin. Thank you for choosing SimpleLogin.
To get started, please confirm that {{email}} is your email address using this link {{activation_link}} within 1 hour. To get started, please confirm that {{email}} is your email address using this link {{activation_link}} within 1 hour.
If it wasn't you, maybe someone entered your email by mistake. In this case you can ignore this mail.
{% endblock %} {% endblock %}

View File

@ -7,7 +7,7 @@
{% endcall %} {% endcall %}
{% call text() %} {% call text() %}
Your have tried to register multiple times to {{ service }}, and this is against the terms of service of SimpleLogin. Please don't do that anymore. You have tried to register multiple times to {{ service }}, and this is against the terms of service of SimpleLogin. Please don't do that anymore.
{% endcall %} {% endcall %}
{% call text() %} {% call text() %}

View File

@ -9,7 +9,7 @@
<a href='https://simplelogin.io/' aria-label="SimpleLogin"> <a href='https://simplelogin.io/' aria-label="SimpleLogin">
<img src="/static/logo-white.svg" <img src="/static/logo-white.svg"
height="30px" height="30px"
class="mb-3" class="mt-3 mb-3"
alt="SimpleLogin logo"> alt="SimpleLogin logo">
</a> </a>
<!-- End Logo --> <!-- End Logo -->
@ -17,8 +17,7 @@
SimpleLogin is an <a href="https://github.com/simple-login">open source</a> email alias solution to protect your email address. SimpleLogin is an <a href="https://github.com/simple-login">open source</a> email alias solution to protect your email address.
</p> </p>
<p class="small text-white"> <p class="small text-white">
SimpleLogin is the product of SimpleLogin SAS, registered in France under the SIREN number 884302134. SimpleLogin is the product of <a href="https://proton.me">Proton AG</a>, registered in Switzerland under number CHE-354.686.492.
SimpleLogin SAS is part of <a href="https://proton.me">Proton AG</a>.
</p> </p>
</div> </div>
</div> </div>
@ -38,12 +37,6 @@
alt="GitHub"> alt="GitHub">
</a> </a>
</li> </li>
<li>
<a class="list-group-item text-white footer-item "
href="https://github.com/simple-login/app/blob/master/docs/api.md">
API Docs
</a>
</li>
<li> <li>
<a class="list-group-item text-white footer-item " <a class="list-group-item text-white footer-item "
href="https://status.simplelogin.io/">Status</a> href="https://status.simplelogin.io/">Status</a>
@ -61,18 +54,10 @@
<a class="list-group-item text-white footer-item" <a class="list-group-item text-white footer-item"
href="https://simplelogin.io/blog/">Blog</a> href="https://simplelogin.io/blog/">Blog</a>
</li> </li>
<li>
<a class="list-group-item text-white footer-item"
href="https://simplelogin.io/job/">Join Us</a>
</li>
<li> <li>
<a class="list-group-item text-white footer-item" <a class="list-group-item text-white footer-item"
href="https://simplelogin.io/about/">About Us</a> href="https://simplelogin.io/about/">About Us</a>
</li> </li>
<li>
<a class="list-group-item text-white footer-item"
href="https://github.com/simple-login/app/projects/1">Roadmap</a>
</li>
<li> <li>
<a class="list-group-item text-white footer-item" <a class="list-group-item text-white footer-item"
href="https://simplelogin.io/contact/">Contact Us</a> href="https://simplelogin.io/contact/">Contact Us</a>
@ -106,37 +91,9 @@
<a class="list-group-item text-white footer-item " <a class="list-group-item text-white footer-item "
href="https://simplelogin.io/docs/">Documentation</a> href="https://simplelogin.io/docs/">Documentation</a>
</li> </li>
</ul>
</div>
<div class="col-sm-4 col-lg-2 mb-4">
<h3 class="h4 text-white">Comparisons</h3>
<ul class="list-group list-group-transparent list-group-white list-group-flush list-group-borderless mb-0 footer-list-group">
<li> <li>
<a class="list-group-item text-white footer-item" <a class="list-group-item text-white footer-item "
href="https://simplelogin.io/blog/email-alias-vs-plus-sign/"> href="https://forum.simplelogin.io">Forum</a>
vs Plus Sign (+) Trick
</a>
</li>
<li>
<a class="list-group-item text-white footer-item"
href="https://simplelogin.io/blog/vs-firefox-relay/">
vs
Firefox Relay
</a>
</li>
<li>
<a class="list-group-item text-white footer-item"
href="https://simplelogin.io/blog/vs-burner-mail/">
vs
Burner Mail
</a>
</li>
<li>
<a class="list-group-item text-white footer-item"
href="https://simplelogin.io/blog/alternative-33mail/">
vs
33mail
</a>
</li> </li>
</ul> </ul>
</div> </div>

Some files were not shown because too many files have changed in this diff.