Compare commits

13 Commits

- 850fc95477
- d172825900
- 026865e5bf
- add94ef2a2
- 1081400948
- 5776128905
- d661860f4c
- 0a52e32972
- 703dcbd0eb
- ce7ed69547
- 4f5564df16
- 2fee569131
- 7ea45d6f5d
app/.github/workflows/main.yml (vendored, 8 changes)
```diff
@@ -15,9 +15,15 @@ jobs:

       - uses: actions/setup-python@v4
         with:
-          python-version: '3.9'
+          python-version: '3.10'
           cache: 'poetry'

+      - name: Install OS dependencies
+        if: ${{ matrix.python-version }} == '3.10'
+        run: |
+          sudo apt update
+          sudo apt install -y libre2-dev libpq-dev
+
       - name: Install dependencies
         if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
         run: poetry install --no-interaction
```
```diff
@@ -7,18 +7,19 @@ repos:
     hooks:
       - id: check-yaml
       - id: trailing-whitespace
-  - repo: https://github.com/psf/black
-    rev: 22.3.0
-    hooks:
-      - id: black
-  - repo: https://github.com/pycqa/flake8
-    rev: 3.9.2
-    hooks:
-      - id: flake8
   - repo: https://github.com/Riverside-Healthcare/djLint
     rev: v1.3.0
     hooks:
       - id: djlint-jinja
         files: '.*\.html'
         entry: djlint --reformat
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.1.5
+    hooks:
+      # Run the linter.
+      - id: ruff
+        args: [ --fix ]
+      # Run the formatter.
+      - id: ruff-format

```
```diff
@@ -23,7 +23,7 @@ COPY poetry.lock pyproject.toml ./
 # Install and setup poetry
 RUN pip install -U pip \
     && apt-get update \
-    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev \
+    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
     && curl -sSL https://install.python-poetry.org | python3 - \
     # Remove curl and netcat from the image
     && apt-get purge -y curl netcat-traditional \
@@ -31,7 +31,7 @@ RUN pip install -U pip \
     && poetry config virtualenvs.create false \
     && poetry install --no-interaction --no-ansi --no-root \
     # Clear apt cache \
-    && apt-get purge -y libre2-dev \
+    && apt-get purge -y libre2-dev cmake ninja-build\
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

```
```diff
@@ -5,13 +5,15 @@ from typing import Optional

 from arrow import Arrow
 from newrelic import agent
+from sqlalchemy import or_

 from app.db import Session
 from app.email_utils import send_welcome_email
-from app.utils import sanitize_email
+from app.utils import sanitize_email, canonicalize_email
 from app.errors import (
     AccountAlreadyLinkedToAnotherPartnerException,
     AccountIsUsingAliasAsEmail,
+    AccountAlreadyLinkedToAnotherUserException,
 )
 from app.log import LOG
 from app.models import (
@@ -130,8 +132,9 @@ class ClientMergeStrategy(ABC):
 class NewUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
         # Will create a new SL User with a random password
+        canonical_email = canonicalize_email(self.link_request.email)
         new_user = User.create(
-            email=self.link_request.email,
+            email=canonical_email,
             name=self.link_request.name,
             password=random_string(20),
             activated=True,
@@ -165,7 +168,6 @@ class NewUserStrategy(ClientMergeStrategy):

 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-
         partner_user = ensure_partner_user_exists_for_user(
             self.link_request, self.user, self.partner
         )
@@ -179,7 +181,7 @@ class ExistingUnlinkedUserStrategy(ClientMergeStrategy):

 class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-        raise AccountAlreadyLinkedToAnotherPartnerException()
+        raise AccountAlreadyLinkedToAnotherUserException()


 def get_login_strategy(
@@ -212,11 +214,21 @@ def process_login_case(
         partner_id=partner.id, external_user_id=link_request.external_user_id
     )
     if partner_user is None:
+        canonical_email = canonicalize_email(link_request.email)
         # We didn't find any SimpleLogin user registered with that partner user id
         # Make sure they aren't using an alias as their link email
         check_alias(link_request.email)
+        check_alias(canonical_email)
         # Try to find it using the partner's e-mail address
-        user = User.get_by(email=link_request.email)
+        users = User.filter(
+            or_(User.email == link_request.email, User.email == canonical_email)
+        ).all()
+        if len(users) > 1:
+            user = [user for user in users if user.email == canonical_email][0]
+        elif len(users) == 1:
+            user = users[0]
+        else:
+            user = None
         return get_login_strategy(link_request, user, partner).process()
     else:
         # We found the SL user registered with that partner user id
```
```diff
@@ -256,6 +256,17 @@ class UserAdmin(SLModelView):

         Session.commit()

+    @action(
+        "clear_delete_on",
+        "Remove scheduled deletion of user",
+        "This will remove the scheduled deletion for this users",
+    )
+    def clean_delete_on(self, ids):
+        for user in User.filter(User.id.in_(ids)):
+            user.delete_on = None
+
+        Session.commit()
+
     # @action(
     #     "login_as",
     #     "Login as this user",
@@ -600,6 +611,26 @@ class NewsletterAdmin(SLModelView):
         else:
             flash(error_msg, "error")

+    @action(
+        "clone_newsletter",
+        "Clone this newsletter",
+    )
+    def clone_newsletter(self, newsletter_ids):
+        if len(newsletter_ids) != 1:
+            flash("you can only select 1 newsletter", "error")
+            return
+
+        newsletter_id = newsletter_ids[0]
+        newsletter: Newsletter = Newsletter.get(newsletter_id)
+        new_newsletter = Newsletter.create(
+            subject=newsletter.subject,
+            html=newsletter.html,
+            plain_text=newsletter.plain_text,
+            commit=True,
+        )
+
+        flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")
+

 class NewsletterUserAdmin(SLModelView):
     column_searchable_list = ["id"]
```
|
@ -70,7 +70,6 @@ def verify_prefix_suffix(
|
|||||||
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
|
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
|
||||||
and not config.DISABLE_ALIAS_SUFFIX
|
and not config.DISABLE_ALIAS_SUFFIX
|
||||||
):
|
):
|
||||||
|
|
||||||
if not alias_domain_prefix.startswith("."):
|
if not alias_domain_prefix.startswith("."):
|
||||||
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
|
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
|
||||||
return False
|
return False
|
||||||
|
@ -21,6 +21,8 @@ from app.email_utils import (
|
|||||||
send_cannot_create_directory_alias_disabled,
|
send_cannot_create_directory_alias_disabled,
|
||||||
get_email_local_part,
|
get_email_local_part,
|
||||||
send_cannot_create_domain_alias,
|
send_cannot_create_domain_alias,
|
||||||
|
send_email,
|
||||||
|
render,
|
||||||
)
|
)
|
||||||
from app.errors import AliasInTrashError
|
from app.errors import AliasInTrashError
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
@ -36,6 +38,8 @@ from app.models import (
|
|||||||
EmailLog,
|
EmailLog,
|
||||||
Contact,
|
Contact,
|
||||||
AutoCreateRule,
|
AutoCreateRule,
|
||||||
|
AliasUsedOn,
|
||||||
|
ClientUser,
|
||||||
)
|
)
|
||||||
from app.regex_utils import regex_match
|
from app.regex_utils import regex_match
|
||||||
|
|
||||||
@ -399,3 +403,58 @@ def alias_export_csv(user, csv_direct_export=False):
|
|||||||
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
|
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
|
||||||
output.headers["Content-type"] = "text/csv"
|
output.headers["Content-type"] = "text/csv"
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
|
||||||
|
def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
|
||||||
|
# cannot transfer alias which is used for receiving newsletter
|
||||||
|
if User.get_by(newsletter_alias_id=alias.id):
|
||||||
|
raise Exception("Cannot transfer alias that's used to receive newsletter")
|
||||||
|
|
||||||
|
# update user_id
|
||||||
|
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
|
||||||
|
{"user_id": new_user.id}
|
||||||
|
)
|
||||||
|
|
||||||
|
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
|
||||||
|
{"user_id": new_user.id}
|
||||||
|
)
|
||||||
|
|
||||||
|
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
|
||||||
|
{"user_id": new_user.id}
|
||||||
|
)
|
||||||
|
|
||||||
|
# remove existing mailboxes from the alias
|
||||||
|
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
|
||||||
|
|
||||||
|
# set mailboxes
|
||||||
|
alias.mailbox_id = new_mailboxes.pop().id
|
||||||
|
for mb in new_mailboxes:
|
||||||
|
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
|
||||||
|
|
||||||
|
# alias has never been transferred before
|
||||||
|
if not alias.original_owner_id:
|
||||||
|
alias.original_owner_id = alias.user_id
|
||||||
|
|
||||||
|
# inform previous owner
|
||||||
|
old_user = alias.user
|
||||||
|
send_email(
|
||||||
|
old_user.email,
|
||||||
|
f"Alias {alias.email} has been received",
|
||||||
|
render(
|
||||||
|
"transactional/alias-transferred.txt",
|
||||||
|
alias=alias,
|
||||||
|
),
|
||||||
|
render(
|
||||||
|
"transactional/alias-transferred.html",
|
||||||
|
alias=alias,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# now the alias belongs to the new user
|
||||||
|
alias.user_id = new_user.id
|
||||||
|
|
||||||
|
# set some fields back to default
|
||||||
|
alias.disable_pgp = False
|
||||||
|
alias.pinned = False
|
||||||
|
|
||||||
|
Session.commit()
|
||||||
|
```diff
@@ -16,3 +16,22 @@ from .views import (
     sudo,
     user,
 )
+
+__all__ = [
+    "alias_options",
+    "new_custom_alias",
+    "custom_domain",
+    "new_random_alias",
+    "user_info",
+    "auth",
+    "auth_mfa",
+    "alias",
+    "apple",
+    "mailbox",
+    "notification",
+    "setting",
+    "export",
+    "phone",
+    "sudo",
+    "user",
+]
```
```diff
@@ -24,12 +24,14 @@ from app.errors import (
     ErrContactAlreadyExists,
     ErrAddressInvalid,
 )
+from app.extensions import limiter
 from app.models import Alias, Contact, Mailbox, AliasMailbox


 @deprecated
 @api_bp.route("/aliases", methods=["GET", "POST"])
 @require_api_auth
+@limiter.limit("10/minute", key_func=lambda: g.user.id)
 def get_aliases():
     """
     Get aliases
@@ -72,6 +74,7 @@ def get_aliases():

 @api_bp.route("/v2/aliases", methods=["GET", "POST"])
 @require_api_auth
+@limiter.limit("50/minute", key_func=lambda: g.user.id)
 def get_aliases_v2():
     """
     Get aliases
```
```diff
@@ -63,6 +63,11 @@ def auth_login():
     elif user.disabled:
         LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
         return jsonify(error="Account disabled"), 400
+    elif user.delete_on is not None:
+        LoginEvent(
+            LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
+        ).send()
+        return jsonify(error="Account scheduled for deletion"), 400
     elif not user.activated:
         LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
         return jsonify(error="Account not activated"), 422
```
```diff
@@ -45,7 +45,7 @@ def create_mailbox():
     mailbox_email = sanitize_email(request.get_json().get("email"))

     if not user.is_premium():
-        return jsonify(error=f"Only premium plan can add additional mailbox"), 400
+        return jsonify(error="Only premium plan can add additional mailbox"), 400

     if not is_valid_email(mailbox_email):
         return jsonify(error=f"{mailbox_email} invalid"), 400
@@ -150,7 +150,7 @@ def new_custom_alias_v3():
     if not data:
         return jsonify(error="request body cannot be empty"), 400

-    if type(data) is not dict:
+    if not isinstance(data, dict):
         return jsonify(error="request body does not follow the required format"), 400

     alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
@@ -168,7 +168,7 @@ def new_custom_alias_v3():
         return jsonify(error="alias prefix invalid format or too long"), 400

     # check if mailbox is not tempered with
-    if type(mailbox_ids) is not list:
+    if not isinstance(mailbox_ids, list):
         return jsonify(error="mailbox_ids must be an array of id"), 400
     mailboxes = []
     for mailbox_id in mailbox_ids:
```
```diff
@@ -17,3 +17,23 @@ from .views import (
     recovery,
     api_to_cookie,
 )
+
+__all__ = [
+    "login",
+    "logout",
+    "register",
+    "activate",
+    "resend_activation",
+    "reset_password",
+    "forgot_password",
+    "github",
+    "google",
+    "facebook",
+    "proton",
+    "change_email",
+    "mfa",
+    "fido",
+    "social",
+    "recovery",
+    "api_to_cookie",
+]
```
```diff
@@ -62,7 +62,7 @@ def fido():
     browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
     if browser and not browser.is_expired() and browser.user_id == user.id:
         login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
         # Redirect user to correct page
         return redirect(next_url or url_for("dashboard.index"))
     else:
@@ -110,7 +110,7 @@ def fido():

     session["sudo_time"] = int(time())
     login_user(user)
-    flash(f"Welcome back!", "success")
+    flash("Welcome back!", "success")

     # Redirect user to correct page
     response = make_response(redirect(next_url or url_for("dashboard.index")))
@@ -54,6 +54,12 @@ def login():
                 "error",
             )
             LoginEvent(LoginEvent.ActionType.disabled_login).send()
+        elif user.delete_on is not None:
+            flash(
+                f"Your account is scheduled to be deleted on {user.delete_on}",
+                "error",
+            )
+            LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
         elif not user.activated:
             show_resend_activation = True
             flash(
```
```diff
@@ -55,7 +55,7 @@ def mfa():
     browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
     if browser and not browser.is_expired() and browser.user_id == user.id:
         login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
         # Redirect user to correct page
         return redirect(next_url or url_for("dashboard.index"))
     else:
@@ -73,7 +73,7 @@ def mfa():
     Session.commit()

     login_user(user)
-    flash(f"Welcome back!", "success")
+    flash("Welcome back!", "success")

     # Redirect user to correct page
     response = make_response(redirect(next_url or url_for("dashboard.index")))
@@ -53,7 +53,7 @@ def recovery_route():
         del session[MFA_USER_ID]

         login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")

         recovery_code.used = True
         recovery_code.used_at = arrow.now()
```
```diff
@@ -94,9 +94,7 @@ def register():
             try:
                 send_activation_email(user, next_url)
                 RegisterEvent(RegisterEvent.ActionType.success).send()
-                DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
-                    1
-                )
+                DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
                 Session.commit()
             except Exception:
                 flash("Invalid email, are you sure the email is correct?", "error")
```
```diff
@@ -179,6 +179,7 @@ AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
 BUCKET = os.environ.get("BUCKET")
 AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
 AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)

 # Paddle
 try:
```
```diff
@@ -33,3 +33,39 @@ from .views import (
     notification,
     support,
 )
+
+__all__ = [
+    "index",
+    "pricing",
+    "setting",
+    "custom_alias",
+    "subdomain",
+    "billing",
+    "alias_log",
+    "alias_export",
+    "unsubscribe",
+    "api_key",
+    "custom_domain",
+    "alias_contact_manager",
+    "enter_sudo",
+    "mfa_setup",
+    "mfa_cancel",
+    "fido_setup",
+    "coupon",
+    "fido_manage",
+    "domain_detail",
+    "lifetime_licence",
+    "directory",
+    "mailbox",
+    "mailbox_detail",
+    "refused_email",
+    "referral",
+    "contact_detail",
+    "setup_done",
+    "batch_import",
+    "alias_transfer",
+    "app",
+    "delete_account",
+    "notification",
+    "support",
+]
```
```diff
@@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
             contact=contact,
         )
         logs.append(al)
-    logs = sorted(logs, key=lambda l: l.when, reverse=True)
+    logs = sorted(logs, key=lambda log: log.when, reverse=True)

     return logs
```
```diff
@@ -7,79 +7,19 @@ from flask import render_template, redirect, url_for, flash, request
 from flask_login import login_required, current_user

 from app import config
+from app.alias_utils import transfer_alias
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
-from app.email_utils import send_email, render
 from app.extensions import limiter
 from app.log import LOG
 from app.models import (
     Alias,
-    Contact,
-    AliasUsedOn,
-    AliasMailbox,
-    User,
-    ClientUser,
 )
 from app.models import Mailbox
 from app.utils import CSRFValidationForm


-def transfer(alias, new_user, new_mailboxes: [Mailbox]):
-    # cannot transfer alias which is used for receiving newsletter
-    if User.get_by(newsletter_alias_id=alias.id):
-        raise Exception("Cannot transfer alias that's used to receive newsletter")
-
-    # update user_id
-    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    # remove existing mailboxes from the alias
-    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
-
-    # set mailboxes
-    alias.mailbox_id = new_mailboxes.pop().id
-    for mb in new_mailboxes:
-        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
-
-    # alias has never been transferred before
-    if not alias.original_owner_id:
-        alias.original_owner_id = alias.user_id
-
-    # inform previous owner
-    old_user = alias.user
-    send_email(
-        old_user.email,
-        f"Alias {alias.email} has been received",
-        render(
-            "transactional/alias-transferred.txt",
-            alias=alias,
-        ),
-        render(
-            "transactional/alias-transferred.html",
-            alias=alias,
-        ),
-    )
-
-    # now the alias belongs to the new user
-    alias.user_id = new_user.id
-
-    # set some fields back to default
-    alias.disable_pgp = False
-    alias.pinned = False
-
-    Session.commit()
-
-
 def hmac_alias_transfer_token(transfer_token: str) -> str:
     alias_hmac = hmac.new(
         config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
@@ -214,7 +154,7 @@ def alias_transfer_receive_route():
         mailboxes,
         token,
     )
-    transfer(alias, current_user, mailboxes)
+    transfer_alias(alias, current_user, mailboxes)

     # reset transfer token
     alias.transfer_token = None
```
```diff
@@ -1,14 +1,9 @@
-from app.db import Session
-
-"""
-List of apps that user has used via the "Sign in with SimpleLogin"
-"""
-
 from flask import render_template, request, flash, redirect
 from flask_login import login_required, current_user
 from sqlalchemy.orm import joinedload

 from app.dashboard.base import dashboard_bp
+from app.db import Session
 from app.models import (
     ClientUser,
 )
@@ -17,6 +12,10 @@ from app.models import (
 @dashboard_bp.route("/app", methods=["GET", "POST"])
 @login_required
 def app_route():
+    """
+    List of apps that user has used via the "Sign in with SimpleLogin"
+    """
+
     client_users = (
         ClientUser.filter_by(user_id=current_user.id)
         .options(joinedload(ClientUser.client))
```
```diff
@@ -100,7 +100,7 @@ def coupon_route():
            commit=True,
        )
        flash(
-            f"Your account has been upgraded to Premium, thanks for your support!",
+            "Your account has been upgraded to Premium, thanks for your support!",
            "success",
        )

```
```diff
@@ -24,6 +24,7 @@ from app.models import (
     AliasMailbox,
     DomainDeletedAlias,
 )
+from app.utils import CSRFValidationForm


 @dashboard_bp.route("/custom_alias", methods=["GET", "POST"])
@@ -48,9 +49,13 @@ def custom_alias():
             at_least_a_premium_domain = True
             break

+    csrf_form = CSRFValidationForm()
     mailboxes = current_user.mailboxes()

     if request.method == "POST":
+        if not csrf_form.validate():
+            flash("Invalid request", "warning")
+            return redirect(request.url)
         alias_prefix = request.form.get("prefix").strip().lower().replace(" ", "")
         signed_alias_suffix = request.form.get("signed-alias-suffix")
         mailbox_ids = request.form.getlist("mailboxes")
@@ -164,4 +169,5 @@ def custom_alias():
         alias_suffixes=alias_suffixes,
         at_least_a_premium_domain=at_least_a_premium_domain,
         mailboxes=mailboxes,
+        csrf_form=csrf_form,
     )
```
```diff
@@ -67,7 +67,7 @@ def directory():
     if request.method == "POST":
         if request.form.get("form-name") == "delete":
             if not delete_dir_form.validate():
-                flash(f"Invalid request", "warning")
+                flash("Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_obj = Directory.get(delete_dir_form.directory_id.data)

@@ -87,7 +87,7 @@ def directory():

         if request.form.get("form-name") == "toggle-directory":
             if not toggle_dir_form.validate():
-                flash(f"Invalid request", "warning")
+                flash("Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_id = toggle_dir_form.directory_id.data
             dir_obj = Directory.get(dir_id)
@@ -109,7 +109,7 @@ def directory():

         elif request.form.get("form-name") == "update":
             if not update_dir_form.validate():
-                flash(f"Invalid request", "warning")
+                flash("Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_id = update_dir_form.directory_id.data
             dir_obj = Directory.get(dir_id)
```
```diff
@@ -57,6 +57,7 @@ def get_stats(user: User) -> Stats:
     methods=["POST"],
     exempt_when=lambda: request.form.get("form-name") != "create-random-email",
 )
+@limiter.limit("10/minute", methods=["GET"], key_func=lambda: current_user.id)
 @login_required
 @parallel_limiter.lock(
     name="alias_creation",
@@ -128,7 +128,6 @@ def setting():
                 new_email_valid = True
                 new_email = canonicalize_email(change_email_form.email.data)
                 if new_email != current_user.email and not pending_email:
-
                     # check if this email is not already used
                     if personal_email_already_used(new_email) or Alias.get_by(
                         email=new_email
```
```diff
@@ -75,12 +75,11 @@ def block_contact(contact_id):
 @dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
 @login_required
 def encoded_unsubscribe(encoded_request: str):
-
     unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
         current_user, encoded_request
     )
     if not unsub_data:
-        flash(f"Invalid unsubscribe request", "error")
+        flash("Invalid unsubscribe request", "error")
         return redirect(url_for("dashboard.index"))
     if unsub_data.action == UnsubscribeAction.DisableAlias:
         alias = Alias.get(unsub_data.data)
@@ -97,14 +96,14 @@ def encoded_unsubscribe(encoded_request: str):
             )
         )
     if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
-        flash(f"You've unsubscribed from the newsletter", "success")
+        flash("You've unsubscribed from the newsletter", "success")
         return redirect(
             url_for(
                 "dashboard.index",
             )
         )
     if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
-        flash(f"The original unsubscribe request has been forwarded", "success")
+        flash("The original unsubscribe request has been forwarded", "success")
         return redirect(
             url_for(
                 "dashboard.index",
```
```diff
@@ -1 +1,3 @@
 from .views import index, new_client, client_detail
+
+__all__ = ["index", "new_client", "client_detail"]
@@ -87,7 +87,7 @@ def client_detail(client_id):
     )

     flash(
-        f"Thanks for submitting, we are informed and will come back to you asap!",
+        "Thanks for submitting, we are informed and will come back to you asap!",
         "success",
     )

@@ -1 +1,3 @@
 from .views import index
+
+__all__ = ["index"]
```
```diff
@@ -93,7 +93,7 @@ def send_welcome_email(user):

     send_email(
         comm_email,
-        f"Welcome to SimpleLogin",
+        "Welcome to SimpleLogin",
         render("com/welcome.txt", user=user, alias=alias),
         render("com/welcome.html", user=user, alias=alias),
         unsubscribe_link,
@@ -104,7 +104,7 @@ def send_welcome_email(user):
 def send_trial_end_soon_email(user):
     send_email(
         user.email,
-        f"Your trial will end soon",
+        "Your trial will end soon",
         render("transactional/trial-end.txt.jinja2", user=user),
         render("transactional/trial-end.html", user=user),
         ignore_smtp_error=True,
@@ -114,7 +114,7 @@ def send_trial_end_soon_email(user):
 def send_activation_email(email, activation_link):
     send_email(
         email,
-        f"Just one more step to join SimpleLogin",
+        "Just one more step to join SimpleLogin",
         render(
             "transactional/activation.txt",
             activation_link=activation_link,
```
```diff
@@ -768,7 +768,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
     ret = ""
     for to_decoded_str, charset in decode_header(header):
         if charset is None:
-            if type(to_decoded_str) is bytes:
+            if isinstance(to_decoded_str, bytes):
                 decoded_str = to_decoded_str.decode()
             else:
                 decoded_str = to_decoded_str
@@ -805,13 +805,13 @@ def to_bytes(msg: Message):
     for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
         try:
             return msg.as_bytes(policy=generator_policy)
-        except:
+        except Exception:
             LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)

     msg_string = msg.as_string()
     try:
         return msg_string.encode()
-    except:
+    except Exception:
         LOG.w("as_string().encode() fails", exc_info=True)

     return msg_string.encode(errors="replace")
@@ -906,7 +906,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
     if content_type == "text/plain":
         encoding = get_encoding(msg)
         payload = msg.get_payload()
-        if type(payload) is str:
+        if isinstance(payload, str):
             clone_msg = copy(msg)
             new_payload = f"""{text_header}
 ------------------------------
@@ -916,7 +916,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
     elif content_type == "text/html":
         encoding = get_encoding(msg)
         payload = msg.get_payload()
-        if type(payload) is str:
+        if isinstance(payload, str):
             new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
 -premailer-cellspacing: 0; margin: 0; padding: 0;">
             <tr>
@@ -972,7 +972,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:


 def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
-    if type(msg) is str:
+    if isinstance(msg, str):
         msg = msg.replace(old, new)
         return msg

@@ -995,7 +995,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
     if content_type in ("text/plain", "text/html"):
         encoding = get_encoding(msg)
         payload = msg.get_payload()
-        if type(payload) is str:
+        if isinstance(payload, str):
             if encoding == EmailEncoding.QUOTED:
                 LOG.d("handle quoted-printable replace %s -> %s", old, new)
                 # first decode the payload
```
```diff
@@ -9,6 +9,7 @@ class LoginEvent:
         failed = 1
         disabled_login = 2
         not_activated = 3
+        scheduled_to_be_deleted = 4

     class Source(EnumE):
         web = 0
@@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(

     from_header = get_header_unicode(msg[headers.FROM])

-    warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
+    warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
 More info on https://simplelogin.io/docs/getting-started/anti-phishing/
 """
-    warning_html = f"""
+    warning_html = """
     <p style="color:red">
         This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
         More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
```
```diff
@@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
         return True

     if is_deleted_alias(msg_info.sender_address):
-        LOG.i(f"Complaint is for deleted alias. Do nothing")
+        LOG.i("Complaint is for deleted alias. Do nothing")
         return True

     contact = Contact.get_by(reply_email=msg_info.sender_address)
@@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
     alias = find_alias_with_address(msg_info.rcpt_address)

     if is_deleted_alias(msg_info.rcpt_address):
-        LOG.i(f"Complaint is for deleted alias. Do nothing")
+        LOG.i("Complaint is for deleted alias. Do nothing")
         return True

     if not alias:
@@ -54,9 +54,8 @@ class UnsubscribeEncoder:
     def encode_subject(
         cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
     ) -> str:
-        if (
-            action != UnsubscribeAction.OriginalUnsubscribeMailto
-            and type(data) is not int
+        if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
+            data, int
         ):
             raise ValueError(f"Data has to be an int for an action of type {action}")
         if action == UnsubscribeAction.OriginalUnsubscribeMailto:
```
```diff
@@ -1,4 +1,5 @@
 import urllib
+from email.header import Header
 from email.message import Message

 from app.email import headers
@@ -33,6 +34,8 @@ class UnsubscribeGenerator:
         if not unsubscribe_data:
             LOG.info("Email has no unsubscribe header")
             return message
+        if isinstance(unsubscribe_data, Header):
+            unsubscribe_data = str(unsubscribe_data.encode())
         raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
         mailto_unsubs = None
         other_unsubs = []
@@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):

     LOG.d("Download file %s from %s", batch_import.file, file_url)
     r = requests.get(file_url)
-    lines = [line.decode() for line in r.iter_lines()]
+    lines = [line.decode("utf-8") for line in r.iter_lines()]

     import_from_csv(batch_import, user, lines)

```
```diff
@@ -1,2 +1,4 @@
 from .integrations import set_enable_proton_cookie
 from .exit_sudo import exit_sudo_mode
+
+__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]
@@ -39,7 +39,6 @@ from app.models import (


 class ExportUserDataJob:
-
     REMOVE_FIELDS = {
         "User": ("otp_secret", "password"),
         "Alias": ("ts_vector", "transfer_token", "hibp_last_check"),
@@ -22,7 +22,6 @@ from app.message_utils import message_to_bytes, message_format_base64_parts

 @dataclass
 class SendRequest:
-
     SAVE_EXTENSION = "sendrequest"

     envelope_from: str
```
```diff
@@ -280,6 +280,7 @@ class IntEnumType(sa.types.TypeDecorator):
 class AliasOptions:
     show_sl_domains: bool = True
     show_partner_domains: Optional[Partner] = None
+    show_partner_premium: Optional[bool] = None


 class Hibp(Base, ModelMixin):
@@ -539,10 +540,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         nullable=False,
     )

+    # Trigger hard deletion of the account at this time
+    delete_on = sa.Column(ArrowType, default=None)
+
     __table_args__ = (
         sa.Index(
             "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
         ),
+        sa.Index("ix_users_delete_on", delete_on),
     )

     @property
@@ -833,6 +838,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
             < self.max_alias_for_free_account()
         )

+    def can_send_or_receive(self) -> bool:
+        if self.disabled:
+            LOG.i(f"User {self} is disabled. Cannot receive or send emails")
+            return False
+        if self.delete_on is not None:
+            LOG.i(
+                f"User {self} is scheduled to be deleted. Cannot receive or send emails"
+            )
+            return False
+        return True
+
     def profile_picture_url(self):
         if self.profile_picture_id:
             return self.profile_picture.get_url()
@@ -1023,29 +1039,35 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     ) -> list["SLDomain"]:
         if alias_options is None:
             alias_options = AliasOptions()
-        conditions = [SLDomain.hidden == False]  # noqa: E712
-        if not self.is_premium():
-            conditions.append(SLDomain.premium_only == False)  # noqa: E712
-        partner_domain_cond = []  # noqa:E711
+        top_conds = [SLDomain.hidden == False]  # noqa: E712
+        or_conds = []  # noqa:E711
         if self.default_alias_public_domain_id is not None:
-            partner_domain_cond.append(
-                SLDomain.id == self.default_alias_public_domain_id
-            )
+            default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
+            if not self.is_premium():
+                default_domain_conds.append(
+                    SLDomain.premium_only == False  # noqa: E712
+                )
+            or_conds.append(and_(*default_domain_conds).self_group())
         if alias_options.show_partner_domains is not None:
             partner_user = PartnerUser.filter_by(
                 user_id=self.id, partner_id=alias_options.show_partner_domains.id
             ).first()
             if partner_user is not None:
-                partner_domain_cond.append(
-                    SLDomain.partner_id == partner_user.partner_id
-                )
+                partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
+                if alias_options.show_partner_premium is None:
+                    alias_options.show_partner_premium = self.is_premium()
+                if not alias_options.show_partner_premium:
+                    partner_domain_cond.append(
+                        SLDomain.premium_only == False  # noqa: E712
+                    )
+                or_conds.append(and_(*partner_domain_cond).self_group())
         if alias_options.show_sl_domains:
-            partner_domain_cond.append(SLDomain.partner_id == None)  # noqa:E711
-        if len(partner_domain_cond) == 1:
-            conditions.append(partner_domain_cond[0])
-        else:
-            conditions.append(or_(*partner_domain_cond))
-        query = Session.query(SLDomain).filter(*conditions).order_by(SLDomain.order)
+            sl_conds = [SLDomain.partner_id == None]  # noqa: E711
+            if not self.is_premium():
+                sl_conds.append(SLDomain.premium_only == False)  # noqa: E712
+            or_conds.append(and_(*sl_conds).self_group())
+        top_conds.append(or_(*or_conds))
+        query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
         return query.all()

     def available_alias_domains(
@@ -1925,6 +1947,7 @@ class Contact(Base, ModelMixin):

 class EmailLog(Base, ModelMixin):
     __tablename__ = "email_log"
+    __table_args__ = (Index("ix_email_log_created_at", "created_at"),)

     user_id = sa.Column(
         sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
@@ -2576,6 +2599,7 @@ class Mailbox(Base, ModelMixin):
             self.email.endswith("@proton.me")
             or self.email.endswith("@protonmail.com")
             or self.email.endswith("@protonmail.ch")
+            or self.email.endswith("@proton.ch")
             or self.email.endswith("@pm.me")
         ):
             return True
@@ -3160,7 +3184,7 @@ class MessageIDMatching(Base, ModelMixin):

     # to track what email_log that has created this matching
     email_log_id = sa.Column(
-        sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True
+        sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
     )

     email_log = orm.relationship("EmailLog")
@@ -3493,7 +3517,7 @@ class PartnerSubscription(Base, ModelMixin):

 class Newsletter(Base, ModelMixin):
     __tablename__ = "newsletter"
-    subject = sa.Column(sa.String(), nullable=False, unique=True, index=True)
+    subject = sa.Column(sa.String(), nullable=False, index=True)

     html = sa.Column(sa.Text)
     plain_text = sa.Column(sa.Text)
```
```diff
@@ -1 +1,3 @@
 from . import views
+
+__all__ = ["views"]
@@ -1 +1,3 @@
 from .views import authorize, token, user_info
+
+__all__ = ["authorize", "token", "user_info"]
@@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
     - the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
     """
     res = set()
-    if type(arg_input) is str:
+    if isinstance(arg_input, str):
         if " " in arg_input:
             for x in arg_input.split(" "):
                 if x:
```
@ -5,3 +5,11 @@ from .views import (
|
|||||||
account_activated,
|
account_activated,
|
||||||
extension_redirect,
|
extension_redirect,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"index",
|
||||||
|
"final",
|
||||||
|
"setup_done",
|
||||||
|
"account_activated",
|
||||||
|
"extension_redirect",
|
||||||
|
]
|
||||||
|
@@ -39,7 +39,6 @@ class _InnerLock:
         lock_redis.storage.delete(lock_name)
 
     def __call__(self, f: Callable[..., Any]):
-
         if self.lock_suffix is None:
             lock_suffix = f.__name__
         else:
@@ -5,3 +5,11 @@ from .views import (
     provider1_callback,
     provider2_callback,
 )
+
+__all__ = [
+    "index",
+    "phone_reservation",
+    "twilio_callback",
+    "provider1_callback",
+    "provider2_callback",
+]
@@ -6,7 +6,6 @@ from app.session import RedisSessionStore
 
 
 def initialize_redis_services(app: flask.Flask, redis_url: str):
-
     if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
         storage = limits.storage.RedisStorage(redis_url)
         app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
@@ -13,17 +13,29 @@ from app.config import (
     LOCAL_FILE_UPLOAD,
     UPLOAD_DIR,
     URL,
+    AWS_ENDPOINT_URL,
 )
+from app.log import LOG
 
-if not LOCAL_FILE_UPLOAD:
-    _session = boto3.Session(
-        aws_access_key_id=AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
-        region_name=AWS_REGION,
-    )
+_s3_client = None
 
 
-def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
+def _get_s3client():
+    global _s3_client
+    if _s3_client is None:
+        args = {
+            "aws_access_key_id": AWS_ACCESS_KEY_ID,
+            "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
+            "region_name": AWS_REGION,
+        }
+        if AWS_ENDPOINT_URL:
+            args["endpoint_url"] = AWS_ENDPOINT_URL
+        _s3_client = boto3.client("s3", **args)
+    return _s3_client
+
+
+def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
     bs.seek(0)
 
     if LOCAL_FILE_UPLOAD:
@@ -34,7 +46,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
             f.write(bs.read())
 
     else:
-        _session.resource("s3").Bucket(BUCKET).put_object(
+        _get_s3client().put_object(
+            Bucket=BUCKET,
             Key=key,
             Body=bs,
             ContentType=content_type,
@@ -52,7 +65,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
             f.write(bs.read())
 
     else:
-        _session.resource("s3").Bucket(BUCKET).put_object(
+        _get_s3client().put_object(
+            Bucket=BUCKET,
             Key=path,
             Body=bs,
             # Support saving a remote file using Http header
@@ -67,12 +81,9 @@ def download_email(path: str) -> Optional[str]:
         file_path = os.path.join(UPLOAD_DIR, path)
         with open(file_path, "rb") as f:
             return f.read()
-    resp = (
-        _session.resource("s3")
-        .Bucket(BUCKET)
-        .get_object(
-            Key=path,
-        )
+    resp = _get_s3client().get_object(
+        Bucket=BUCKET,
+        Key=path,
     )
     if not resp or "Body" not in resp:
         return None
@@ -88,8 +99,7 @@ def get_url(key: str, expires_in=3600) -> str:
     if LOCAL_FILE_UPLOAD:
         return URL + "/static/upload/" + key
     else:
-        s3_client = _session.client("s3")
-        return s3_client.generate_presigned_url(
+        return _get_s3client().generate_presigned_url(
             ExpiresIn=expires_in,
             ClientMethod="get_object",
             Params={"Bucket": BUCKET, "Key": key},
@@ -100,5 +110,15 @@ def delete(path: str):
     if LOCAL_FILE_UPLOAD:
         os.remove(os.path.join(UPLOAD_DIR, path))
     else:
-        o = _session.resource("s3").Bucket(BUCKET).Object(path)
-        o.delete()
+        _get_s3client().delete_object(Bucket=BUCKET, Key=path)
+
+
+def create_bucket_if_not_exists():
+    s3client = _get_s3client()
+    buckets = s3client.list_buckets()
+    for bucket in buckets["Buckets"]:
+        if bucket["Name"] == BUCKET:
+            LOG.i("Bucket already exists")
+            return
+    s3client.create_bucket(Bucket=BUCKET)
+    LOG.i(f"Bucket {BUCKET} created")
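For orientation, below is a minimal sketch of how the lazily created client above could be exercised against an S3-compatible endpoint. The endpoint URL, bucket name and credentials are placeholders, not values taken from this diff.

import boto3

# Placeholders standing in for AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
# AWS_REGION, AWS_ENDPOINT_URL and BUCKET from app/config.py.
args = {
    "aws_access_key_id": "example-access-key",
    "aws_secret_access_key": "example-secret-key",
    "region_name": "us-east-1",
    "endpoint_url": "http://localhost:9000",  # e.g. a local MinIO server
}
s3 = boto3.client("s3", **args)

# The same client calls the patched s3.py now issues through _get_s3client().
s3.put_object(
    Bucket="example-bucket",
    Key="upload/demo.txt",
    Body=b"hello",
    ContentType="text/plain",
)
url = s3.generate_presigned_url(
    ClientMethod="get_object",
    Params={"Bucket": "example-bucket", "Key": "upload/demo.txt"},
    ExpiresIn=3600,
)
s3.delete_object(Bucket="example-bucket", Key="upload/demo.txt")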
@@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
         try:
             data = pickle.loads(val)
             return ServerSession(data, session_id=session_id)
-        except:
+        except Exception:
             pass
         return ServerSession(session_id=str(uuid.uuid4()))
 
app/cron.py (67 changes)

@@ -5,11 +5,11 @@ from typing import List, Tuple
 
 import arrow
 import requests
-from sqlalchemy import func, desc, or_
+from sqlalchemy import func, desc, or_, and_
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
-from sqlalchemy.sql import Insert
+from sqlalchemy.sql import Insert, text
 
 from app import s3, config
 from app.alias_utils import nb_email_log_for_mailbox
@@ -85,23 +85,43 @@ def delete_logs():
     delete_refused_emails()
     delete_old_monitoring()
 
-    for t in TransactionalEmail.filter(
+    for t_email in TransactionalEmail.filter(
         TransactionalEmail.created_at < arrow.now().shift(days=-7)
     ):
-        TransactionalEmail.delete(t.id)
+        TransactionalEmail.delete(t_email.id)
 
     for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
         Bounce.delete(b.id)
 
     Session.commit()
 
-    LOG.d("Delete EmailLog older than 2 weeks")
+    LOG.d("Deleting EmailLog older than 2 weeks")
 
-    max_dt = arrow.now().shift(weeks=-2)
-    nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete()
-    Session.commit()
+    total_deleted = 0
+    batch_size = 500
+    Session.execute("set session statement_timeout=30000").rowcount
+    queries_done = 0
+    cutoff_time = arrow.now().shift(days=-14)
+    rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
+    expected_queries = int(rows_to_delete / batch_size)
+    sql = text(
+        "DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
+    )
+    str_cutoff_time = cutoff_time.isoformat()
+    while total_deleted < rows_to_delete:
+        deleted_count = Session.execute(
+            sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
+        ).rowcount
+        Session.commit()
+        total_deleted += deleted_count
+        queries_done += 1
+        LOG.i(
+            f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
+        )
+        if deleted_count < batch_size:
+            break
 
-    LOG.i("Delete %s email logs", nb_deleted)
+    LOG.i("Deleted %s email logs", total_deleted)
 
 
 def delete_refused_emails():
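As an aside, the batch-delete pattern introduced in delete_logs above can be reproduced standalone. The sketch below assumes a SQLAlchemy 2.0-style engine and a placeholder connection URL instead of the app's Session object.

import arrow
import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/simplelogin")  # placeholder URL

sql = sa.text(
    "DELETE FROM email_log WHERE id IN "
    "(SELECT id FROM email_log WHERE created_at < :cutoff_time "
    "ORDER BY created_at LIMIT :batch_size)"
)
cutoff_time = arrow.now().shift(days=-14).isoformat()
batch_size = 500

with engine.connect() as conn:
    # Cap each statement at 30s, mirroring the statement_timeout set by the cron job.
    conn.execute(sa.text("SET statement_timeout = 30000"))
    while True:
        deleted = conn.execute(
            sql, {"cutoff_time": cutoff_time, "batch_size": batch_size}
        ).rowcount
        conn.commit()  # commit after each batch so row locks are released promptly
        if deleted < batch_size:
            break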
@@ -141,7 +161,7 @@ def notify_premium_end():
 
         send_email(
             user.email,
-            f"Your subscription will end soon",
+            "Your subscription will end soon",
             render(
                 "transactional/subscription-end.txt",
                 user=user,
@@ -198,7 +218,7 @@ def notify_manual_sub_end():
         LOG.d("Remind user %s that their manual sub is ending soon", user)
         send_email(
             user.email,
-            f"Your subscription will end soon",
+            "Your subscription will end soon",
             render(
                 "transactional/manual-subscription-end.txt",
                 user=user,
@@ -570,21 +590,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
 """
 
     monitoring_report += "\n====================================\n"
-    monitoring_report += f"""
+    monitoring_report += """
 # Account bounce report:
 """
 
     for email, bounces in bounce_report():
         monitoring_report += f"{email}: {bounces}\n"
 
-    monitoring_report += f"""\n
+    monitoring_report += """\n
 # Alias creation report:
 """
 
     for email, nb_alias, date in alias_creation_report():
         monitoring_report += f"{email}, {date}: {nb_alias}\n"
 
-    monitoring_report += f"""\n
+    monitoring_report += """\n
 # Full bounce detail report:
 """
     monitoring_report += all_bounce_report()
@@ -1079,14 +1099,14 @@ def notify_hibp():
             )
 
         LOG.d(
-            f"Send new breaches found email to %s for %s breaches aliases",
+            "Send new breaches found email to %s for %s breaches aliases",
             user,
             len(breached_aliases),
         )
 
         send_email(
             user.email,
-            f"You were in a data breach",
+            "You were in a data breach",
             render(
                 "transactional/hibp-new-breaches.txt.jinja2",
                 user=user,
@@ -1106,6 +1126,18 @@ def notify_hibp():
         Session.commit()
 
 
+def clear_users_scheduled_to_be_deleted():
+    users = User.filter(
+        and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
+    ).all()
+    for user in users:
+        LOG.i(
+            f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
+        )
+        User.delete(user.id)
+    Session.commit()
+
+
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()
@@ -1172,3 +1204,6 @@ if __name__ == "__main__":
     elif args.job == "send_undelivered_mails":
         LOG.d("Sending undelivered emails")
         load_unsent_mails_from_fs_and_resend()
+    elif args.job == "delete_scheduled_users":
+        LOG.d("Deleting users scheduled to be deleted")
+        clear_users_scheduled_to_be_deleted()
@@ -61,7 +61,12 @@ jobs:
     schedule: "15 10 * * *"
     captureStderr: true
 
+  - name: SimpleLogin delete users scheduled to be deleted
+    command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
+    shell: /bin/bash
+    schedule: "15 11 * * *"
+    captureStderr: true
+    concurrencyPolicy: Forbid
+
   - name: SimpleLogin send unsent emails
     command: python /code/cron.py -j send_undelivered_mails
@@ -235,7 +235,6 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
             contact.mail_from = mail_from
             Session.commit()
     else:
-
         try:
             contact = Contact.create(
                 user_id=alias.user_id,
@@ -637,8 +636,8 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
 
     user = alias.user
 
-    if user.disabled:
-        LOG.w("User %s disabled, disable forwarding emails for %s", user, alias)
+    if not user.can_send_or_receive():
+        LOG.i(f"User {user} cannot receive emails")
         if should_ignore_bounce(envelope.mail_from):
             return [(True, status.E207)]
         else:
@@ -1070,13 +1069,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
     user = alias.user
     mail_from = envelope.mail_from
 
-    if user.disabled:
-        LOG.e(
-            "User %s disabled, disable sending emails from %s to %s",
-            user,
-            alias,
-            contact,
-        )
+    if not user.can_send_or_receive():
+        LOG.i(f"User {user} cannot send emails")
         return False, status.E504
 
     # Check if we need to reject or quarantine based on dmarc
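The two hunks above swap direct checks of user.disabled for a user.can_send_or_receive() helper whose body is not part of this extract. Given the delete_on column introduced elsewhere in this change set, a plausible shape for that check is sketched below; this is an assumption for illustration, not the code from models.py.

from dataclasses import dataclass
from typing import Optional

import arrow


@dataclass
class _UserLike:
    # Minimal stand-in for the User model: only the fields the check needs.
    disabled: bool
    delete_on: Optional[arrow.Arrow]


def can_send_or_receive(user: _UserLike) -> bool:
    # Assumed logic: a disabled user, or one already scheduled for deletion
    # via delete_on, should neither receive nor send emails.
    if user.disabled:
        return False
    if user.delete_on is not None:
        return False
    return True


assert can_send_or_receive(_UserLike(disabled=False, delete_on=None))
assert not can_send_or_receive(_UserLike(disabled=True, delete_on=None))
assert not can_send_or_receive(_UserLike(disabled=False, delete_on=arrow.now()))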
@@ -1202,7 +1196,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
     )
 
     # replace reverse alias by real address for all contacts
-    for (reply_email, website_email) in contact_query.values(
+    for reply_email, website_email in contact_query.values(
         Contact.reply_email, Contact.website_email
     ):
         msg = replace(msg, reply_email, website_email)
@@ -1257,7 +1251,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
     if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
         # no need to replace TO header
         LOG.d("email is sent in BCC mode")
-        del msg[headers.TO]
     else:
         replace_header_when_reply(msg, alias, headers.TO)
 
@@ -1958,7 +1951,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
         for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
             res.append((is_delivered, smtp_status))
 
-        for (is_success, smtp_status) in res:
+        for is_success, smtp_status in res:
             # Consider all deliveries successful if 1 delivery is successful
             if is_success:
                 return smtp_status
@@ -2278,7 +2271,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
     if nb_success > 0 and nb_non_success > 0:
         LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")
 
-    for (is_success, smtp_status) in res:
+    for is_success, smtp_status in res:
         # Consider all deliveries successful if 1 delivery is successful
         if is_success:
             return smtp_status
The next run of hunks removes entries from the bundled word-list files (the file names are not visible in this extract). Only removed or changed entries are listed; unchanged context words are omitted.

@@ -89,7 +89,6 @@ aghast
-agnostic
@@ -375,8 +374,6 @@ augmented
-autism
-autistic
@@ -446,7 +443,6 @@ backyard
-badass
@@ -1106,7 +1102,6 @@ clinic
-cloak
@@ -1776,7 +1771,6 @@ diagnosis
-diaper
@@ -2032,9 +2026,6 @@ duffel
-duller
-dullness
-duly
@@ -2527,8 +2518,6 @@ feisty
-feminism
-feminist
@@ -2667,7 +2656,6 @@ fondness
-fool
@@ -2777,7 +2765,6 @@ gag
-gala
@@ -3164,8 +3151,6 @@ hardware
-harmful
-harmless
@@ -3340,7 +3325,6 @@ identical
-idiocy
@@ -3357,7 +3341,6 @@ imaging
-immature
@@ -3387,14 +3370,10 @@ implode
-impolite
-impotence
-impotency
-impotent
@@ -3424,8 +3403,6 @@ irritable
-islamic
-islamist
@@ -3524,7 +3501,6 @@ june
-junkman
@@ -3570,9 +3546,6 @@ king
-kissable
-kisser
-kissing
@@ -3649,7 +3622,6 @@ laundry
-laxative
@@ -3690,7 +3662,6 @@ liable
-licking
@@ -3741,8 +3712,6 @@ livestock
-lubricant
-lubricate
@@ -3878,7 +3847,6 @@ marshland
-marxism
@@ -3914,8 +3882,6 @@ maximum
-moaner
-moaning
@@ -4124,7 +4090,6 @@ nemeses
-nerd
@@ -4139,7 +4104,6 @@ never
-nicotine
@@ -4167,14 +4131,10 @@ nuptials
-nutcase
-nuttiness
-nutty
-nuzzle
@@ -4205,7 +4165,6 @@ obstinate
-obtuse
@@ -4446,7 +4405,6 @@ palpitate
-pampers
@@ -4651,7 +4609,6 @@ plated
-platonic
@@ -4777,8 +4734,6 @@ prancing
-prayer
-praying
@@ -4796,8 +4751,6 @@ prefix
-pregnancy
-pregnant
@@ -4937,7 +4890,6 @@ prudishly
-psychic
@@ -4957,8 +4909,7 @@ punctual
-punisher
+punishe
-punk
@@ -5040,7 +4991,6 @@ quote
-racism
@@ -5155,7 +5105,6 @@ recount
-rectal
@@ -5622,7 +5571,6 @@ sarcastic
-sassy
@@ -5651,7 +5599,6 @@ scaling
-scam
@@ -5928,8 +5875,6 @@ silent
-silliness
-silly
@@ -5991,7 +5936,6 @@ skimmer
-skinhead
@@ -6197,7 +6141,6 @@ splinter
-spoiled
@@ -7079,7 +7022,6 @@ undocked
-undress
@@ -158677,16 +158677,6 @@ isis
-islam
-islamic
-islamism
-islamist
-islamistic
-islamite
-islamitic
-islamitish
-islamization
-islamize
app/migrations/versions/2023_090715_0a5701a4f5e4_.py (new file, 33 lines)

@@ -0,0 +1,33 @@
+"""empty message
+
+Revision ID: 0a5701a4f5e4
+Revises: 01827104004b
+Create Date: 2023-09-07 15:28:10.122756
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '0a5701a4f5e4'
+down_revision = '01827104004b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('users', sa.Column('delete_on', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True))
+    with op.get_context().autocommit_block():
+        op.create_index('ix_users_delete_on', 'users', ['delete_on'], unique=False, postgresql_concurrently=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.get_context().autocommit_block():
+        op.drop_index('ix_users_delete_on', table_name='users', postgresql_concurrently=True)
+    op.drop_column('users', 'delete_on')
+    # ### end Alembic commands ###
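The migration above wraps its index creation in op.get_context().autocommit_block() because postgresql_concurrently=True emits CREATE INDEX CONCURRENTLY, which PostgreSQL refuses to run inside a transaction block, while Alembic normally runs each migration in one. A minimal sketch of the pattern, with placeholder table and column names rather than ones from this repository:

from alembic import op


def upgrade():
    # CREATE INDEX CONCURRENTLY cannot run inside a transaction, so step out
    # of Alembic's per-migration transaction first.
    with op.get_context().autocommit_block():
        op.create_index(
            "ix_example_created_at",
            "example_table",
            ["created_at"],
            unique=False,
            postgresql_concurrently=True,
        )


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index(
            "ix_example_created_at",
            table_name="example_table",
            postgresql_concurrently=True,
        )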
app/migrations/versions/2023_092818_ec7fdde8da9f_.py (new file, 34 lines)

@@ -0,0 +1,34 @@
+"""empty message
+
+Revision ID: ec7fdde8da9f
+Revises: 0a5701a4f5e4
+Create Date: 2023-09-28 18:09:48.016620
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "ec7fdde8da9f"
+down_revision = "0a5701a4f5e4"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.get_context().autocommit_block():
+        op.create_index(
+            "ix_email_log_created_at", "email_log", ["created_at"], unique=False
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.get_context().autocommit_block():
+        op.drop_index("ix_email_log_created_at", table_name="email_log")
+    # ### end Alembic commands ###
app/migrations/versions/2023_100510_46ecb648a47e_.py (new file, 39 lines)

@@ -0,0 +1,39 @@
+"""empty message
+
+Revision ID: 46ecb648a47e
+Revises: ec7fdde8da9f
+Create Date: 2023-10-05 10:43:35.668902
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "46ecb648a47e"
+down_revision = "ec7fdde8da9f"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.get_context().autocommit_block():
+        op.create_index(
+            op.f("ix_message_id_matching_email_log_id"),
+            "message_id_matching",
+            ["email_log_id"],
+            unique=False,
+        )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.get_context().autocommit_block():
+        op.drop_index(
+            op.f("ix_message_id_matching_email_log_id"),
+            table_name="message_id_matching",
+        )
+    # ### end Alembic commands ###
app/migrations/versions/2023_110714_4bc54632d9aa_.py (new file, 31 lines)

@@ -0,0 +1,31 @@
+"""empty message
+
+Revision ID: 4bc54632d9aa
+Revises: 46ecb648a47e
+Create Date: 2023-11-07 14:02:17.610226
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '4bc54632d9aa'
+down_revision = '46ecb648a47e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('ix_newsletter_subject', table_name='newsletter')
+    op.create_index(op.f('ix_newsletter_subject'), 'newsletter', ['subject'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(op.f('ix_newsletter_subject'), table_name='newsletter')
+    op.create_index('ix_newsletter_subject', 'newsletter', ['subject'], unique=True)
+    # ### end Alembic commands ###
app/poetry.lock (generated, 312 changes)

Summary of the lockfile hunks:

- Header: the file is now generated by Poetry 1.7.0 (was 1.5.1).
- aiospamc 0.6.1 → 0.10.0: requires Python >=3.8,<4.0, the certifi pin is relaxed to "*", and new dependencies loguru (>=0.7.0,<0.8.0), typer (>=0.9.0,<0.10.0) and typing-extensions (>=4.6.2,<5.0.0) are added.
- asynctest 0.13.0 is removed entirely.
- future 0.18.2 → 0.18.3.
- greenlet 2.0.2: additional macOS arm64 wheel hashes are recorded for the cp310, cp311, cp38 and cp39 builds.
- httplib2 0.18.1 → 0.22.0, which adds a pyparsing dependency.
- loguru 0.7.2 is added (colorama and win32-setctime dependencies on Windows, plus a dev extra).
- Mako 1.1.3 → 1.2.4 (Python >=3.7, adds a "testing" extra).
- protobuf 3.15.0 → 4.24.3 (Python >=3.7, new wheel set, the six dependency is dropped).
- Conditional dependencies that only applied to Python < 3.8 (typing-extensions, typed-ast, importlib-metadata, asynctest) are removed from the dependency lists of aiohttp, astroid, atpublic, backports.entry-points-selectable, black, click, importlib-metadata, pluggy, pre-commit, pylint and others.
|
||||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||||
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
|
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
|
||||||
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
|
|
||||||
iniconfig = "*"
|
iniconfig = "*"
|
||||||
packaging = "*"
|
packaging = "*"
|
||||||
pluggy = ">=0.12,<2.0"
|
pluggy = ">=0.12,<2.0"
|
||||||
@ -2665,19 +2649,17 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "redis"
|
name = "redis"
|
||||||
version = "4.5.3"
|
version = "4.6.0"
|
||||||
description = "Python client for Redis database and key-value store"
|
description = "Python client for Redis database and key-value store"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "redis-4.5.3-py3-none-any.whl", hash = "sha256:7df17a0a2b72a4c8895b462dd07616c51b1dcb48fdd7ecb7b6f4bf39ecb2e94e"},
|
{file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"},
|
||||||
{file = "redis-4.5.3.tar.gz", hash = "sha256:56732e156fe31801c4f43396bd3ca0c2a7f6f83d7936798531b9848d103381aa"},
|
{file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
async-timeout = {version = ">=4.0.2", markers = "python_version < \"3.11\""}
|
async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""}
|
||||||
importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""}
|
|
||||||
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
|
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
hiredis = ["hiredis (>=1.0.0)"]
|
hiredis = ["hiredis (>=1.0.0)"]
|
||||||
@ -2768,24 +2750,24 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "requests"
|
name = "requests"
|
||||||
version = "2.25.1"
|
version = "2.31.0"
|
||||||
description = "Python HTTP for Humans."
|
description = "Python HTTP for Humans."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"},
|
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
|
||||||
{file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"},
|
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
certifi = ">=2017.4.17"
|
certifi = ">=2017.4.17"
|
||||||
chardet = ">=3.0.2,<5"
|
charset-normalizer = ">=2,<4"
|
||||||
idna = ">=2.5,<3"
|
idna = ">=2.5,<4"
|
||||||
urllib3 = ">=1.21.1,<1.27"
|
urllib3 = ">=1.21.1,<3"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
security = ["cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"]
|
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
|
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "requests-file"
|
name = "requests-file"
|
||||||
@ -2845,51 +2827,34 @@ files = [
|
|||||||
{file = "ruamel.yaml-0.16.12.tar.gz", hash = "sha256:076cc0bc34f1966d920a49f18b52b6ad559fbe656a0748e3535cf7b3f29ebf9e"},
|
{file = "ruamel.yaml-0.16.12.tar.gz", hash = "sha256:076cc0bc34f1966d920a49f18b52b6ad559fbe656a0748e3535cf7b3f29ebf9e"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.9\""}
|
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
docs = ["ryd"]
|
docs = ["ryd"]
|
||||||
jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
|
jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ruamel.yaml.clib"
|
name = "ruff"
|
||||||
version = "0.2.2"
|
version = "0.1.5"
|
||||||
description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
|
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"},
|
{file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1"},
|
{file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win32.whl", hash = "sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win_amd64.whl", hash = "sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win32.whl", hash = "sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f"},
|
{file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62"},
|
{file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c"},
|
{file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988"},
|
{file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3"},
|
{file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2"},
|
{file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91"},
|
{file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6"},
|
{file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e"},
|
{file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"},
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win32.whl", hash = "sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win32.whl", hash = "sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win32.whl", hash = "sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb"},
|
|
||||||
{file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"},
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -3106,15 +3071,20 @@ url = ["furl (>=0.4.1)"]
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sqlparse"
|
name = "sqlparse"
|
||||||
version = "0.4.2"
|
version = "0.4.4"
|
||||||
description = "A non-validating SQL parser."
|
description = "A non-validating SQL parser."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.5"
|
python-versions = ">=3.5"
|
||||||
files = [
|
files = [
|
||||||
{file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"},
|
{file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
|
||||||
{file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"},
|
{file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["build", "flake8"]
|
||||||
|
doc = ["sphinx"]
|
||||||
|
test = ["pytest", "pytest-cov"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "strictyaml"
|
name = "strictyaml"
|
||||||
version = "1.1.0"
|
version = "1.1.0"
|
||||||
@ -3249,47 +3219,35 @@ pytz = "*"
|
|||||||
requests = ">=2.0.0"
|
requests = ">=2.0.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "typed-ast"
|
name = "typer"
|
||||||
version = "1.5.2"
|
version = "0.9.0"
|
||||||
description = "a fork of Python 2 and 3 ast modules with type comment support"
|
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.6"
|
python-versions = ">=3.6"
|
||||||
files = [
|
files = [
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"},
|
{file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"},
|
{file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"},
|
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"},
|
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"},
|
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"},
|
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"},
|
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"},
|
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"},
|
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"},
|
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"},
|
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"},
|
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"},
|
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"},
|
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"},
|
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"},
|
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"},
|
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"},
|
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"},
|
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"},
|
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"},
|
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"},
|
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"},
|
|
||||||
{file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"},
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
click = ">=7.1.1,<9.0.0"
|
||||||
|
typing-extensions = ">=3.7.4.3"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
|
||||||
|
dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
|
||||||
|
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
|
||||||
|
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "typing-extensions"
|
name = "typing-extensions"
|
||||||
version = "4.0.1"
|
version = "4.8.0"
|
||||||
description = "Backported and Experimental Type Hints for Python 3.6+"
|
description = "Backported and Experimental Type Hints for Python 3.8+"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.6"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"},
|
{file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
|
||||||
{file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"},
|
{file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -3345,7 +3303,6 @@ files = [
|
|||||||
"backports.entry-points-selectable" = ">=1.0.4"
|
"backports.entry-points-selectable" = ">=1.0.4"
|
||||||
distlib = ">=0.3.1,<1"
|
distlib = ">=0.3.1,<1"
|
||||||
filelock = ">=3.0.0,<4"
|
filelock = ">=3.0.0,<4"
|
||||||
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
|
|
||||||
platformdirs = ">=2,<3"
|
platformdirs = ">=2,<3"
|
||||||
six = ">=1.9.0,<2"
|
six = ">=1.9.0,<2"
|
||||||
|
|
||||||
@ -3426,6 +3383,20 @@ files = [
|
|||||||
dev = ["coverage", "pallets-sphinx-themes", "pytest", "pytest-timeout", "sphinx", "sphinx-issues", "tox"]
|
dev = ["coverage", "pallets-sphinx-themes", "pytest", "pytest-timeout", "sphinx", "sphinx-issues", "tox"]
|
||||||
watchdog = ["watchdog"]
|
watchdog = ["watchdog"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "win32-setctime"
|
||||||
|
version = "1.1.0"
|
||||||
|
description = "A small Python utility to set file creation time on Windows"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.5"
|
||||||
|
files = [
|
||||||
|
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
|
||||||
|
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wrapt"
|
name = "wrapt"
|
||||||
version = "1.15.0"
|
version = "1.15.0"
|
||||||
@ -3635,7 +3606,6 @@ files = [
|
|||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
idna = ">=2.0"
|
idna = ">=2.0"
|
||||||
multidict = ">=4.0"
|
multidict = ">=4.0"
|
||||||
typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "zipp"
|
name = "zipp"
|
||||||
@ -3729,5 +3699,5 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
|||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = "^3.7.2"
|
python-versions = "^3.10"
|
||||||
content-hash = "9cf184eded5a8fb41f7725ff5ed0f26ad5bbd44b9d59a9180abb4c6bf3fe278a"
|
content-hash = "01afc410d21eeac0a0ac7e8ef6eeb0a991cf4bc091c3351049263462e205ff63"
|
||||||
@@ -18,6 +18,9 @@ exclude = '''
 )
 '''
 
+[tool.ruff]
+ignore-init-module-imports = true
+
 [tool.djlint]
 indent = 2
 profile = "jinja"
@@ -53,7 +56,7 @@ packages = [
 include = ["templates/*", "templates/**/*", "local_data/*.txt"]
 
 [tool.poetry.dependencies]
-python = "^3.7.2"
+python = "^3.10"
 flask = "^1.1.2"
 flask_login = "^0.5.0"
 wtforms = "^2.3.3"
@@ -96,7 +99,6 @@ pyspf = "^2.0.14"
 Flask-Limiter = "^1.4"
 memory_profiler = "^0.57.0"
 gevent = "22.10.2"
-aiospamc = "^0.6.1"
 email_validator = "^1.1.1"
 PGPy = "0.5.4"
 coinbase-commerce = "^1.0.1"
@@ -112,6 +114,7 @@ cryptography = "37.0.1"
 SQLAlchemy = "1.3.24"
 redis = "^4.5.3"
 newrelic-telemetry-sdk = "^0.5.0"
+aiospamc = "0.10"
 
 [tool.poetry.dev-dependencies]
 pytest = "^7.0.0"
@@ -121,6 +124,9 @@ black = "^22.1.0"
 djlint = "^1.3.0"
 pylint = "^2.14.4"
 
+[tool.poetry.group.dev.dependencies]
+ruff = "^0.1.5"
+
 [build-system]
 requires = ["poetry>=0.12"]
 build-backend = "poetry.masonry.api"
@@ -407,8 +407,10 @@ def jinja2_filter(app):
 
     @app.context_processor
     def inject_stage_and_region():
+        now = arrow.now()
         return dict(
-            YEAR=arrow.now().year,
+            YEAR=now.year,
+            NOW=now,
             URL=URL,
             SENTRY_DSN=SENTRY_FRONT_END_DSN,
             VERSION=SHA1,
@@ -641,7 +643,7 @@ def setup_paddle_callback(app: Flask):
 
     @app.route("/paddle_coupon", methods=["GET", "POST"])
     def paddle_coupon():
-        LOG.d(f"paddle coupon callback %s", request.form)
+        LOG.d("paddle coupon callback %s", request.form)
 
         if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
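The change above computes arrow.now() once per context injection and exposes it to templates as NOW next to the existing YEAR. A minimal, self-contained sketch of the same Flask pattern (module and function names here are illustrative, not the application's actual code):

# Sketch: one arrow.now() call feeds both YEAR and NOW, so the two can never disagree.
import arrow
from flask import Flask

app = Flask(__name__)


@app.context_processor
def inject_now():
    now = arrow.now()
    return dict(YEAR=now.year, NOW=now)

Templates can then gate time-limited content with {% if NOW.timestamp < some_cutoff %} ... {% endif %}, which is how the Black Friday banners further down use it.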
@@ -1,13 +1,12 @@
-from time import sleep
-
 import flask_migrate
 from IPython import embed
 from sqlalchemy_utils import create_database, database_exists, drop_database
 
 from app import models
 from app.config import DB_URI
-from app.models import *
+from app.db import Session
+from app.log import LOG
+from app.models import User, RecoveryCode
 
 if False:
     # noinspection PyUnreachableCode
Binary image file not shown (13 KiB before, 38 KiB after).

@@ -86,6 +86,12 @@
   </head>
   <body>
     <div class="page">
+      {% if NOW.timestamp < 1701475201 and current_user.is_authenticated and current_user.should_show_upgrade_button() %}
+
+        <div class="alert alert-success text-center mb-0" role="alert">
+          Black Friday: $20 for the first year instead of $30. Available until December 1st.
+        </div>
+      {% endif %}
       {% block announcement %}{% endblock %}
       <div class="container">
         <!-- For flash messages -->
@@ -93,6 +93,7 @@
       </div>
       <div class="row">
         <div class="col p-1">
+          {{ csrf_form.csrf_token }}
           <button type="submit" id="create" class="btn btn-primary mt-1">Create</button>
         </div>
       </div>
@@ -268,7 +268,7 @@
           If you are using a subdomain, e.g. <i>subdomain.domain.com</i>,
           you need to use <i>dkim._domainkey.subdomain</i> as the domain instead.
           <br />
-          That means, if your domain is <i>mail.domain.com</i> you should enter <i>dkim._domainkey.mail.domain.com</i> as the Domain.
+          That means, if your domain is <i>mail.domain.com</i> you should enter <i>dkim._domainkey.mail</i> as the Domain.
           <br />
         </div>
         <div class="alert alert-info">
@@ -57,6 +57,22 @@
 {% endblock %}
 {% block default_content %}
 
+  {% if NOW.timestamp < 1701475201 %}
+
+    <div class="alert alert-info">
+      Black Friday Deal: 33% off on the yearly plan for the <b>first</b> year ($20 instead of $30).
+      <br>
+      Please use this coupon code
+      <em data-toggle="tooltip"
+          title="Click to copy"
+          class="clipboard"
+          data-clipboard-text="BF2023">BF2023</em> during the checkout.
+      <br>
+      <img src="/static/images/coupon.png" class="m-2" style="max-width: 300px">
+      <br>
+      Available until December 1, 2023.
+    </div>
+  {% endif %}
   <div class="pb-8">
     <div class="text-center mx-md-auto mb-8 mt-6">
       <h1>Upgrade to unlock premium features</h1>
@@ -58,7 +58,7 @@ def test_different_scenarios_v4_2(flask_client):
     assert r.json["suffixes"]
     assert r.json["prefix_suggestion"] == ""  # no hostname => no suggestion
 
-    for (suffix, signed_suffix) in r.json["suffixes"]:
+    for suffix, signed_suffix in r.json["suffixes"]:
         assert signed_suffix.startswith(suffix)
 
     # <<< with hostname >>>
@@ -1,4 +1,4 @@
-from app.dashboard.views import alias_transfer
+import app.alias_utils
 from app.db import Session
 from app.models import (
     Alias,
@@ -29,7 +29,7 @@ def test_alias_transfer(flask_client):
         user_id=new_user.id, email="hey2@example.com", verified=True, commit=True
     )
 
-    alias_transfer.transfer(alias, new_user, new_user.mailboxes())
+    app.alias_utils.transfer_alias(alias, new_user, new_user.mailboxes())
 
     # refresh from db
     alias = Alias.get(alias.id)
@@ -56,13 +56,15 @@ def test_get_jobs_to_run(flask_client):
         run_at=now.shift(hours=3),
     )
     # Job out of attempts
-    Job.create(
-        name="",
-        payload="",
-        state=JobState.taken.value,
-        taken_at=now.shift(minutes=-(config.JOB_TAKEN_RETRY_WAIT_MINS + 10)),
-        attempts=config.JOB_MAX_ATTEMPTS + 1,
-    ),
+    (
+        Job.create(
+            name="",
+            payload="",
+            state=JobState.taken.value,
+            taken_at=now.shift(minutes=-(config.JOB_TAKEN_RETRY_WAIT_MINS + 10)),
+            attempts=config.JOB_MAX_ATTEMPTS + 1,
+        ),
+    )
     Session.commit()
     jobs = get_jobs_to_run()
     assert len(jobs) == len(expected_jobs_to_run)
@@ -18,7 +18,7 @@ from app.db import Session
 from app.errors import AccountAlreadyLinkedToAnotherPartnerException
 from app.models import Partner, PartnerUser, User
 from app.proton.utils import get_proton_partner
-from app.utils import random_string
+from app.utils import random_string, canonicalize_email
 from tests.utils import random_email
 
 
@@ -377,3 +377,48 @@ def test_link_account_with_uppercase(flask_client):
     )
     assert partner_user.partner_id == get_proton_partner().id
     assert partner_user.external_user_id == partner_user_id
+
+
+def test_login_to_account_with_canonical_email(flask_client):
+    email = "a.{rand}@gmail.com".format(rand=random_string(10))
+    canonical_email = canonicalize_email(email)
+    assert email != canonical_email
+    partner_user_id = random_string()
+    link_request = random_link_request(
+        external_user_id=partner_user_id, email=email.upper()
+    )
+    user = create_user(canonical_email)
+    assert user.email == canonical_email
+    res = process_login_case(link_request, get_proton_partner())
+    assert res.user.id == user.id
+
+
+def test_login_to_account_with_canonical_email_if_there_is_also_non_canonical(
+    flask_client,
+):
+    email = "a.{rand}@gmail.com".format(rand=random_string(10))
+    canonical_email = canonicalize_email(email)
+    assert email != canonical_email
+    partner_user_id = random_string()
+    link_request = random_link_request(
+        external_user_id=partner_user_id, email=email.upper()
+    )
+    user = create_user(canonical_email)
+    create_user(email)
+    assert user.email == canonical_email
+    res = process_login_case(link_request, get_proton_partner())
+    assert res.user.id == user.id
+
+
+def test_login_creates_account_with_canonical_email(
+    flask_client,
+):
+    email = "a.{rand}@gmail.com".format(rand=random_string(10))
+    canonical_email = canonicalize_email(email)
+    assert email != canonical_email
+    partner_user_id = random_string()
+    link_request = random_link_request(
+        external_user_id=partner_user_id, email=email.upper()
+    )
+    res = process_login_case(link_request, get_proton_partner())
+    assert res.user.email == canonical_email
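These new tests only assert the behaviour of app.utils.canonicalize_email: an upper-cased, dotted Gmail address must map to a canonical form that differs from the raw string, and login/linking must resolve to the user stored under that canonical form. The helper itself is not part of this diff; the sketch below is only an assumption of what gmail-style canonicalization typically does (lowercase, drop dots and +suffixes), to make the assertions concrete:

# Hypothetical stand-in, for illustration only; not the SimpleLogin implementation.
def canonicalize_email_sketch(address: str) -> str:
    local, _, domain = address.lower().partition("@")
    if domain in ("gmail.com", "googlemail.com"):
        local = local.split("+", 1)[0].replace(".", "")
    return f"{local}@{domain}"


assert canonicalize_email_sketch("A.user+tag@GMAIL.com") == "auser@gmail.com"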
@@ -1,18 +1,17 @@
 import arrow
 
-from app.models import CoinbaseSubscription, ApiToCookieToken, ApiKey
-from cron import notify_manual_sub_end, delete_expired_tokens
+import cron
+from app.db import Session
+from app.models import CoinbaseSubscription, ApiToCookieToken, ApiKey, User
 from tests.utils import create_new_user
 
 
 def test_notify_manual_sub_end(flask_client):
     user = create_new_user()
 
     CoinbaseSubscription.create(
         user_id=user.id, end_at=arrow.now().shift(days=13, hours=2), commit=True
     )
-    notify_manual_sub_end()
+    cron.notify_manual_sub_end()
 
 
 def test_cleanup_tokens(flask_client):
@@ -33,6 +32,22 @@ def test_cleanup_tokens(flask_client):
         api_key_id=api_key.id,
         commit=True,
     ).id
-    delete_expired_tokens()
+    cron.delete_expired_tokens()
     assert ApiToCookieToken.get(id_to_clean) is None
     assert ApiToCookieToken.get(id_to_keep) is not None
+
+
+def test_cleanup_users():
+    u_delete_none_id = create_new_user().id
+    u_delete_after = create_new_user()
+    u_delete_after_id = u_delete_after.id
+    u_delete_before = create_new_user()
+    u_delete_before_id = u_delete_before.id
+    now = arrow.now()
+    u_delete_after.delete_on = now.shift(minutes=1)
+    u_delete_before.delete_on = now.shift(minutes=-1)
+    Session.flush()
+    cron.clear_users_scheduled_to_be_deleted()
+    assert User.get(u_delete_none_id) is not None
+    assert User.get(u_delete_after_id) is not None
+    assert User.get(u_delete_before_id) is None
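test_cleanup_users pins down the contract of cron.clear_users_scheduled_to_be_deleted(): only users whose delete_on timestamp already lies in the past are removed. A hedged, plain-Python sketch of that selection rule (the real job operates on User rows via SQLAlchemy, not on dictionaries):

import arrow


def users_due_for_deletion(users):
    # Return only the entries whose scheduled deletion time has passed.
    now = arrow.now()
    return [u for u in users if u["delete_on"] is not None and u["delete_on"] <= now]


now = arrow.now()
users = [
    {"id": 1, "delete_on": None},                   # never scheduled, kept
    {"id": 2, "delete_on": now.shift(minutes=1)},   # not due yet, kept
    {"id": 3, "delete_on": now.shift(minutes=-1)},  # overdue, deleted
]
assert [u["id"] for u in users_due_for_deletion(users)] == [3]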
@@ -199,3 +199,31 @@ def test_get_free_partner_and_hidden_default_domain():
     assert [d.domain for d in domains] == user.available_sl_domains(
         alias_options=options
     )
+
+
+def test_get_free_partner_and_premium_partner():
+    user = create_new_user()
+    user.trial_end = None
+    PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        user_id=user.id,
+        external_user_id=random_token(10),
+        flush=True,
+    )
+    user.default_alias_public_domain_id = (
+        SLDomain.filter_by(hidden=False, premium_only=False).first().id
+    )
+    Session.flush()
+    options = AliasOptions(
+        show_sl_domains=False,
+        show_partner_domains=get_proton_partner(),
+        show_partner_premium=True,
+    )
+    domains = user.get_sl_domains(alias_options=options)
+    assert len(domains) == 3
+    assert domains[0].domain == "premium_partner"
+    assert domains[1].domain == "free_partner"
+    assert domains[2].domain == "free_non_partner"
+    assert [d.domain for d in domains] == user.available_sl_domains(
+        alias_options=options
+    )
@@ -7,7 +7,7 @@ import arrow
 import pytest
 
 from app import config
-from app.config import MAX_ALERT_24H, EMAIL_DOMAIN, ROOT_DIR
+from app.config import MAX_ALERT_24H, ROOT_DIR
 from app.db import Session
 from app.email_utils import (
     get_email_domain_part,
@@ -16,7 +16,6 @@ from app.email_utils import (
     delete_header,
     add_or_replace_header,
     send_email_with_rate_control,
-    copy,
     get_spam_from_header,
     get_header_from_bounce,
     add_header,
@@ -17,7 +17,7 @@ def test_encode_decode(flask_client):
 
     jwt_token = make_id_token(client_user)
 
-    assert type(jwt_token) is str
+    assert isinstance(jwt_token, str)
     assert verify_id_token(jwt_token)
 
 
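The type(...)-style checks rewritten here, and in the PGP test further down, are replaced with isinstance, the idiomatic form that linters such as ruff steer towards and that also accepts subclasses. A tiny standalone illustration of the difference:

class Token(str):
    """A str subclass, e.g. a thin wrapper around a JWT string."""


token = Token("eyJ...")

assert isinstance(token, str)   # subclasses pass the isinstance check
assert type(token) is not str   # the exact-type check rejects them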
@@ -315,3 +315,13 @@ def test_create_contact_for_noreply(flask_client):
         reply_email=generate_reply_email(NOREPLY, alias),
     )
     assert contact.website_email == NOREPLY
+
+
+def test_user_can_send_receive():
+    user = create_new_user()
+    assert user.can_send_or_receive()
+    user.disabled = True
+    assert not user.can_send_or_receive()
+    user.disabled = False
+    user.delete_on = arrow.now()
+    assert not user.can_send_or_receive()
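test_user_can_send_receive fixes the expected behaviour of User.can_send_or_receive(): a disabled account, or one scheduled for deletion, can no longer send or receive mail. A self-contained sketch of that predicate, using only the attribute names visible in the test (the real model method may check more):

from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class UserSketch:
    disabled: bool = False
    delete_on: Optional[datetime] = None

    def can_send_or_receive(self) -> bool:
        # Either flag blocks mail flow, mirroring the assertions above.
        return not self.disabled and self.delete_on is None


u = UserSketch()
assert u.can_send_or_receive()
u.disabled = True
assert not u.can_send_or_receive()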
@@ -49,9 +49,9 @@ def encrypt_decrypt_text(text: str):
     priv = pgpy.PGPKey()
     priv.parse(private_key)
     decrypted = priv.decrypt(encrypted).message
-    if type(decrypted) == str:
+    if isinstance(decrypted, str):
         assert decrypted == text
-    elif type(decrypted) == bytearray:
+    elif isinstance(decrypted, bytearray):
         assert decrypted.decode() == text
 
 
@@ -71,7 +71,7 @@ def load_eml_file(
     if not template_values:
         template_values = {}
     rendered = template.render(**template_values)
-    return email.message_from_string(rendered)
+    return email.message_from_bytes(rendered.encode("utf-8"))
 
 
 def random_email() -> str:
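The last hunk parses the rendered .eml fixture with email.message_from_bytes on the UTF-8 encoding instead of email.message_from_string, which better matches how messages arrive over SMTP and avoids surprises with non-ASCII template content. A minimal stdlib-only example of the byte-based parsing (the message content here is invented for illustration):

import email

raw = (
    "Subject: hello\r\n"
    "From: sender@example.com\r\n"
    "\r\n"
    "body with non-ASCII: café\r\n"
)

msg = email.message_from_bytes(raw.encode("utf-8"))
assert msg["From"] == "sender@example.com"
assert "café" in msg.get_payload(decode=True).decode("utf-8")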