From df611e47740eab2c6bcb569d9953d93173bf9f35 Mon Sep 17 00:00:00 2001
From: MrMeeb
Date: Fri, 15 Mar 2024 12:00:08 +0000
Subject: [PATCH] 4.41.2

---
 app/.github/workflows/main.yml                |  10 +-
 app/CONTRIBUTING.md                           |   4 +-
 app/app/dashboard/views/alias_export.py       |   2 +
 app/app/dashboard/views/batch_import.py       |   4 +-
 app/cron.py                                   | 123 +++++++++++++-----
 app/oneshot/emulate_dummy_load.py             |  37 ++++++
 .../mark_aliases_as_partner_created.py        |  57 +++++---
 ...acts.py => replace_noreply_in_contacts.py} |   0
 app/templates/dashboard/account_setting.html  |  15 ---
 app/templates/dashboard/setting.html          |  19 ++-
 app/tests/cron/__init__.py                    |   0
 app/tests/{ => cron}/test_cron.py             |   0
 app/tests/cron/test_get_alias_for_hibp.py     | 116 +++++++++++++++++
 13 files changed, 318 insertions(+), 69 deletions(-)
 create mode 100644 app/oneshot/emulate_dummy_load.py
 rename app/oneshot/{replace_noreply_in_cotnacts.py => replace_noreply_in_contacts.py} (100%)
 create mode 100644 app/tests/cron/__init__.py
 rename app/tests/{ => cron}/test_cron.py (100%)
 create mode 100644 app/tests/cron/test_get_alias_for_hibp.py

diff --git a/app/.github/workflows/main.yml b/app/.github/workflows/main.yml
index c94a4e3..6e2ffd6 100644
--- a/app/.github/workflows/main.yml
+++ b/app/.github/workflows/main.yml
@@ -1,7 +1,6 @@
 name: Test and lint
 
-on:
-  push:
+on: [push, pull_request]
 
 jobs:
   lint:
@@ -139,6 +138,12 @@ jobs:
         with:
           fetch-depth: 0
 
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
       - name: Create Sentry release
         uses: getsentry/action-release@v1
         env:
@@ -158,6 +163,7 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: .
+          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.meta.outputs.tags }}

diff --git a/app/CONTRIBUTING.md b/app/CONTRIBUTING.md
index 637d2fd..b6b1019 100644
--- a/app/CONTRIBUTING.md
+++ b/app/CONTRIBUTING.md
@@ -151,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
 
 ## Pull request
 
-The code is formatted using https://github.com/psf/black, to format the code, simply run
+The code is formatted using [ruff](https://github.com/astral-sh/ruff). To format the code, simply run
 
 ```
-poetry run black .
+poetry run ruff format .
 ```
 
 The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

diff --git a/app/app/dashboard/views/alias_export.py b/app/app/dashboard/views/alias_export.py
index f21df4b..159b00d 100644
--- a/app/app/dashboard/views/alias_export.py
+++ b/app/app/dashboard/views/alias_export.py
@@ -2,10 +2,12 @@ from app.dashboard.base import dashboard_bp
 from flask_login import login_required, current_user
 from app.alias_utils import alias_export_csv
 from app.dashboard.views.enter_sudo import sudo_required
+from app.extensions import limiter
 
 
 @dashboard_bp.route("/alias_export", methods=["GET"])
 @login_required
 @sudo_required
+@limiter.limit("2/minute")
 def alias_export_route():
     return alias_export_csv(current_user)

diff --git a/app/app/dashboard/views/batch_import.py b/app/app/dashboard/views/batch_import.py
index 7521cc0..3adf335 100644
--- a/app/app/dashboard/views/batch_import.py
+++ b/app/app/dashboard/views/batch_import.py
@@ -7,6 +7,7 @@ from app.config import JOB_BATCH_IMPORT
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
+from app.extensions import limiter
 from app.log import LOG
 from app.models import File, BatchImport, Job
 from app.utils import random_string, CSRFValidationForm
@@ -15,6 +16,7 @@ from app.utils import random_string, CSRFValidationForm
 
 @dashboard_bp.route("/batch_import", methods=["GET", "POST"])
 @login_required
 @sudo_required
+@limiter.limit("10/minute", methods=["POST"])
 def batch_import_route():
     # only for users who have custom domains
     if not current_user.verified_custom_domains():
@@ -39,7 +41,7 @@ def batch_import_route():
         return redirect(request.url)
     if len(batch_imports) > 10:
         flash(
-            "You have too many imports already. Wait until some get cleaned up",
+            "You have too many imports already. Please wait until some get cleaned up",
             "error",
         )
         return render_template(
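Both dashboard routes above now gate expensive operations behind flask-limiter budgets ("2/minute" for the CSV export, "10/minute" on POST for batch import). A minimal, self-contained sketch of that decorator pattern; the app, routes, and in-memory storage below are illustrative stand-ins, not SimpleLogin's actual wiring in app/extensions.py:

```
from flask import Flask
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address

app = Flask(__name__)
# Demo-only in-memory storage; a real deployment would point storage_uri
# at something shared such as Redis.
limiter = Limiter(get_remote_address, app=app, storage_uri="memory://")


@app.route("/export")
@limiter.limit("2/minute")  # at most two exports per client per minute
def export():
    return "csv-data"


@app.route("/batch", methods=["GET", "POST"])
@limiter.limit("10/minute", methods=["POST"])  # only POSTs consume the budget
def batch():
    return "ok"
```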
diff --git a/app/cron.py b/app/cron.py
index b344696..b84aba7 100644
--- a/app/cron.py
+++ b/app/cron.py
@@ -5,7 +5,7 @@ from typing import List, Tuple
 import arrow
 import requests
-from sqlalchemy import func, desc, or_, and_, nullsfirst
+from sqlalchemy import func, desc, or_, and_
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
@@ -976,6 +976,9 @@ async def _hibp_check(api_key, queue):
             continue
         user = alias.user
         if user.disabled or not user.is_paid():
+            # Mark it as hibp done to skip it as if it had been checked
+            alias.hibp_last_check = arrow.utcnow()
+            Session.commit()
             continue
 
         LOG.d("Checking HIBP for %s", alias)
@@ -1030,6 +1033,60 @@
         await asyncio.sleep(rate_sleep)
 
 
+def get_alias_to_check_hibp(
+    oldest_hibp_allowed: arrow.Arrow,
+    user_ids_to_skip: list[int],
+    min_alias_id: int,
+    max_alias_id: int,
+):
+    now = arrow.now()
+    alias_query = (
+        Session.query(Alias)
+        .join(User, User.id == Alias.user_id)
+        .join(Subscription, User.id == Subscription.user_id, isouter=True)
+        .join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
+        .join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
+        .join(
+            CoinbaseSubscription,
+            User.id == CoinbaseSubscription.user_id,
+            isouter=True,
+        )
+        .join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
+        .join(
+            PartnerSubscription,
+            PartnerSubscription.partner_user_id == PartnerUser.id,
+            isouter=True,
+        )
+        .filter(
+            or_(
+                Alias.hibp_last_check.is_(None),
+                Alias.hibp_last_check < oldest_hibp_allowed,
+            ),
+            Alias.user_id.notin_(user_ids_to_skip),
+            Alias.enabled,
+            Alias.id >= min_alias_id,
+            Alias.id < max_alias_id,
+            User.disabled == False,  # noqa: E712
+            or_(
+                User.lifetime,
+                ManualSubscription.end_at > now,
+                Subscription.next_bill_date > now.date(),
+                AppleSubscription.expires_date > now,
+                CoinbaseSubscription.end_at > now,
+                PartnerSubscription.end_at > now,
+            ),
+        )
+    )
+    if config.HIBP_SKIP_PARTNER_ALIAS:
+        alias_query = alias_query.filter(
+            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
+        )
+    for alias in (
+        alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
+    ):
+        yield alias
+
+
 async def check_hibp():
     """
     Check all aliases on the HIBP (Have I Been Pwned) API
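The new get_alias_to_check_hibp generator only ever scans a bounded id window [min_alias_id, max_alias_id) and streams rows with yield_per, so no single query has to sort or hold the whole alias table. A toy, self-contained sketch of the same windowed-scan idea, with a plain dict standing in for the alias table:

```
from typing import Iterator

ROWS = {i: f"alias-{i}" for i in range(1, 23)}  # fake alias table keyed by id


def get_rows_in_window(min_id: int, max_id: int) -> Iterator[str]:
    # stand-in for the SQL filters Alias.id >= min_id AND Alias.id < max_id
    for row_id in sorted(ROWS):
        if min_id <= row_id < max_id:
            yield ROWS[row_id]


step = 10
max_id = max(ROWS)
for window_start in range(0, max_id, step):
    batch = list(get_rows_in_window(window_start, window_start + step))
    print(f"window [{window_start}, {window_start + step}): {len(batch)} rows")
```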
@@ -1056,43 +1113,43 @@ async def check_hibp():
     user_ids = [row[0] for row in rows]
     LOG.d("Got %d users to skip" % len(user_ids))
 
-    LOG.d("Preparing list of aliases to check")
+    LOG.d("Checking aliases")
     queue = asyncio.Queue()
-    max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
-    alias_query = Alias.filter(
-        or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date),
-        Alias.user_id.notin_(user_ids),
-        Alias.enabled,
-    )
-    if config.HIBP_SKIP_PARTNER_ALIAS:
-        alias_query = alias_query(Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0)
-    for alias in (
-        alias_query.order_by(nullsfirst(Alias.hibp_last_check.asc()), Alias.id.asc())
-        .yield_per(500)
-        .enable_eagerloads(False)
-    ):
-        await queue.put(alias.id)
-
-    LOG.d("Need to check about %s aliases", queue.qsize())
-
-    # Start one checking process per API key
-    # Each checking process will take one alias from the queue, get the info
-    # and then sleep for 1.5 seconds (due to HIBP API request limits)
-    checkers = []
-    for i in range(len(config.HIBP_API_KEYS)):
-        checker = asyncio.create_task(
-            _hibp_check(
-                config.HIBP_API_KEYS[i],
-                queue,
-            )
-        )
-        checkers.append(checker)
-
-    # Wait until all checking processes are done
-    for checker in checkers:
-        await checker
-
-    LOG.d("Done checking HIBP API for aliases in breaches")
+    min_alias_id = 0
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+    step = 10000
+    now = arrow.now()
+    oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
+    alias_checked = 0
+    for alias_batch_id in range(min_alias_id, max_alias_id, step):
+        for alias in get_alias_to_check_hibp(
+            oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
+        ):
+            await queue.put(alias.id)
+
+        alias_checked += queue.qsize()
+        LOG.d(
+            f"Need to check about {queue.qsize()} aliases in batch {alias_batch_id}/{max_alias_id}"
+        )
+
+        # Start one checking process per API key
+        # Each checking process will take one alias from the queue, get the info
+        # and then sleep for 1.5 seconds (due to HIBP API request limits)
+        checkers = []
+        for i in range(len(config.HIBP_API_KEYS)):
+            checker = asyncio.create_task(
+                _hibp_check(
+                    config.HIBP_API_KEYS[i],
+                    queue,
+                )
+            )
+            checkers.append(checker)
+
+        # Wait until all checking processes are done
+        for checker in checkers:
+            await checker
+
+    LOG.d(f"Done checking {alias_checked} aliases for breaches via the HIBP API")
 
 
 def notify_hibp():
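check_hibp now refills one asyncio.Queue per id window and awaits a fresh set of _hibp_check workers (one per API key) before moving on to the next window. A runnable toy of that queue-plus-workers shape; the keys, sleep, and workload below are placeholders, not the real HIBP client:

```
import asyncio

API_KEYS = ["key-a", "key-b"]  # placeholder keys; one worker per key


async def worker(key: str, queue: asyncio.Queue) -> None:
    while True:
        try:
            item = queue.get_nowait()
        except asyncio.QueueEmpty:
            return  # batch drained, worker exits
        # stand-in for the real HIBP lookup plus rate-limit sleep
        await asyncio.sleep(0.01)
        print(f"{key} checked alias {item}")


async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    for alias_id in range(10):  # one batch of alias ids
        await queue.put(alias_id)
    workers = [asyncio.create_task(worker(k, queue)) for k in API_KEYS]
    await asyncio.gather(*workers)


asyncio.run(main())
```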
= " OR ".join(cond) +sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})" +print(sql) -for domain in domains: - LOG.i(f"Checking aliases for domain {domain.domain}") - for alias in ( - Alias.filter( - Alias.email.like(f"%{domain.domain}"), - Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0, - ) - .enable_eagerloads(False) - .yield_per(100) - .all() - ): - alias.flags = alias.flags | Alias.FLAG_PARTNER_CREATED - LOG.i(f" * Updating {alias.email} to {alias.flags}") - Session.commit() +step = 1000 +updated = 0 +start_time = time.time() +for batch_start in range(alias_id_start, max_alias_id, step): + updated += Session.execute( + sql, + { + "start": batch_start, + "end": batch_start + step, + "flag": Alias.FLAG_PARTNER_CREATED, + }, + ).rowcount + elapsed = time.time() - start_time + time_per_alias = elapsed / (batch_start - alias_id_start + step) + last_batch_id = batch_start + step + remaining = max_alias_id - last_batch_id + time_remaining = (max_alias_id - last_batch_id) * time_per_alias + hours_remaining = time_remaining / 3600.0 + percent = int( + ((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start) + ) + print( + f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining" + ) +print(f"Updated aliases up to {max_alias_id}") diff --git a/app/oneshot/replace_noreply_in_cotnacts.py b/app/oneshot/replace_noreply_in_contacts.py similarity index 100% rename from app/oneshot/replace_noreply_in_cotnacts.py rename to app/oneshot/replace_noreply_in_contacts.py diff --git a/app/templates/dashboard/account_setting.html b/app/templates/dashboard/account_setting.html index a6ba0b5..58ad7ce 100644 --- a/app/templates/dashboard/account_setting.html +++ b/app/templates/dashboard/account_setting.html @@ -120,21 +120,6 @@ - -
diff --git a/app/oneshot/replace_noreply_in_cotnacts.py b/app/oneshot/replace_noreply_in_contacts.py
similarity index 100%
rename from app/oneshot/replace_noreply_in_cotnacts.py
rename to app/oneshot/replace_noreply_in_contacts.py
diff --git a/app/templates/dashboard/account_setting.html b/app/templates/dashboard/account_setting.html
index a6ba0b5..58ad7ce 100644
--- a/app/templates/dashboard/account_setting.html
+++ b/app/templates/dashboard/account_setting.html
@@ -120,21 +120,6 @@
-    [15 removed lines: the "Alias import/export" card, with the text "You can import your aliases created on other platforms into SimpleLogin. You can also export your aliases to a readable csv format for a future batch import." and the "Batch Import" / "Export Aliases" buttons; the surrounding HTML markup was lost in extraction]
diff --git a/app/templates/dashboard/setting.html b/app/templates/dashboard/setting.html
index e3e7b17..0340376 100644
--- a/app/templates/dashboard/setting.html
+++ b/app/templates/dashboard/setting.html
@@ -559,7 +559,7 @@
                     sender address.
                     If this option is enabled, the original sender address is stored in the email header
                     X-SimpleLogin-Envelope-From
-                    and the original From header is stored in X-SimpleLogin-Original-From.
+                    and the original From header is stored in X-SimpleLogin-Original-From. You can choose to display this header in your email client.
                     As email headers aren't encrypted, your mailbox service can know the sender address via this header.
@@ -583,6 +583,23 @@
+    [17 added lines: the same "Alias import/export" card moved here from account_setting.html, with its title, the description text quoted above, and the "Batch Import" / "Export Aliases" buttons; the surrounding HTML markup was lost in extraction]
 {% endblock %}
 {% block script %}
diff --git a/app/tests/cron/__init__.py b/app/tests/cron/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/tests/test_cron.py b/app/tests/cron/test_cron.py
similarity index 100%
rename from app/tests/test_cron.py
rename to app/tests/cron/test_cron.py
diff --git a/app/tests/cron/test_get_alias_for_hibp.py b/app/tests/cron/test_get_alias_for_hibp.py
new file mode 100644
index 0000000..ca7d82a
--- /dev/null
+++ b/app/tests/cron/test_get_alias_for_hibp.py
@@ -0,0 +1,116 @@
+import arrow
+import pytest
+
+import cron
+from app.db import Session
+from app.models import (
+    Alias,
+    AppleSubscription,
+    PlanEnum,
+    CoinbaseSubscription,
+    ManualSubscription,
+    Subscription,
+    PartnerUser,
+    PartnerSubscription,
+    User,
+)
+from app.proton.utils import get_proton_partner
+from tests.utils import create_new_user, random_token
+
+
+def test_get_alias_for_free_user_has_no_alias():
+    user = create_new_user()
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_get_alias_for_lifetime():
+    user = create_new_user()
+    user.lifetime = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def create_partner_sub(user: User):
+    pu = PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        partner_email=user.email,
+        external_user_id=random_token(10),
+        user_id=user.id,
+        flush=True,
+    )
+    PartnerSubscription.create(
+        partner_user_id=pu.id, end_at=arrow.utcnow().shift(days=15)
+    )
+
+
+sub_generator_list = [
+    lambda u: AppleSubscription.create(
+        user_id=u.id,
+        expires_date=arrow.now().shift(days=15),
+        original_transaction_id=random_token(10),
+        receipt_data=random_token(10),
+        plan=PlanEnum.monthly,
+    ),
+    lambda u: CoinbaseSubscription.create(
+        user_id=u.id,
+        end_at=arrow.now().shift(days=15),
+    ),
+    lambda u: ManualSubscription.create(
+        user_id=u.id,
+        end_at=arrow.now().shift(days=15),
+    ),
+    lambda u: Subscription.create(
+        user_id=u.id,
+        cancel_url="",
+        update_url="",
+        subscription_id=random_token(10),
+        event_time=arrow.now(),
+        next_bill_date=arrow.now().shift(days=15).date(),
+        plan=PlanEnum.monthly,
+    ),
+    create_partner_sub,
+]
+
+
+@pytest.mark.parametrize("sub_generator", sub_generator_list)
+def test_get_alias_for_sub(sub_generator):
+    user = create_new_user()
+    sub_generator(user)
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def test_disabled_user_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    user.disabled = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_skipped_user_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
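The new test module covers the five paying-subscription paths with a single parametrized test by iterating over one-argument factory callables. A minimal, self-contained pytest sketch of that factory-list pattern, with plain dicts standing in for the ORM models:

```
import pytest


def make_apple(user):
    # stand-in for AppleSubscription.create(...)
    return {"user": user, "kind": "apple", "active": True}


def make_coinbase(user):
    # stand-in for CoinbaseSubscription.create(...)
    return {"user": user, "kind": "coinbase", "active": True}


sub_factories = [make_apple, make_coinbase]


def is_paid(sub) -> bool:
    # stand-in for the cron query's "has any active subscription" filter
    return sub["active"]


@pytest.mark.parametrize("factory", sub_factories)
def test_any_subscription_counts_as_paid(factory):
    sub = factory("user-1")
    assert is_paid(sub)
```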