4.41.2
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m24s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m44s
Build-Release-Image / Merge-Images (push) Successful in 17s
Build-Release-Image / Create-Release (push) Successful in 11s
Build-Release-Image / Notify (push) Successful in 5s
Commit df611e4774 (parent cb216393a5)
app/.github/workflows/main.yml (vendored, 10 changes)
@@ -1,7 +1,6 @@
 name: Test and lint
 
-on:
-  push:
+on: [push, pull_request]
 
 jobs:
   lint:
@@ -139,6 +138,12 @@ jobs:
         with:
           fetch-depth: 0
 
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
       - name: Create Sentry release
         uses: getsentry/action-release@v1
         env:
@@ -158,6 +163,7 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: .
+          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.meta.outputs.tags }}
 
@@ -151,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
 
 ## Pull request
 
-The code is formatted using https://github.com/psf/black, to format the code, simply run
+The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
 
 ```
-poetry run black .
+poetry run ruff format .
 ```
 
 The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by
@@ -2,10 +2,12 @@ from app.dashboard.base import dashboard_bp
 from flask_login import login_required, current_user
 from app.alias_utils import alias_export_csv
 from app.dashboard.views.enter_sudo import sudo_required
+from app.extensions import limiter
 
 
 @dashboard_bp.route("/alias_export", methods=["GET"])
 @login_required
 @sudo_required
+@limiter.limit("2/minute")
 def alias_export_route():
     return alias_export_csv(current_user)
@@ -7,6 +7,7 @@ from app.config import JOB_BATCH_IMPORT
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
+from app.extensions import limiter
 from app.log import LOG
 from app.models import File, BatchImport, Job
 from app.utils import random_string, CSRFValidationForm
@@ -15,6 +16,7 @@ from app.utils import random_string, CSRFValidationForm
 @dashboard_bp.route("/batch_import", methods=["GET", "POST"])
 @login_required
 @sudo_required
+@limiter.limit("10/minute", methods=["POST"])
 def batch_import_route():
     # only for users who have custom domains
     if not current_user.verified_custom_domains():
@@ -39,7 +41,7 @@ def batch_import_route():
         return redirect(request.url)
     if len(batch_imports) > 10:
         flash(
-            "You have too many imports already. Wait until some get cleaned up",
+            "You have too many imports already. Please wait until some get cleaned up",
             "error",
         )
         return render_template(
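Both dashboard endpoints above are now wrapped with Flask-Limiter's `limiter.limit` decorator. As a minimal sketch of how that decorator behaves, assuming a stock Flask-Limiter setup (the app and key function below are illustrative, not SimpleLogin's actual `app/extensions.py`):

```python
# Minimal Flask-Limiter sketch; illustrative only, not the app's extensions module.
from flask import Flask
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address

app = Flask(__name__)
limiter = Limiter(get_remote_address, app=app)  # rate-limit per client IP

@app.route("/alias_export", methods=["GET"])
@limiter.limit("2/minute")  # same limit-string syntax as in the diff above
def alias_export_route():
    return "export"

@app.route("/batch_import", methods=["GET", "POST"])
@limiter.limit("10/minute", methods=["POST"])  # only POST requests count
def batch_import_route():
    return "import"

# Requests beyond the limit receive HTTP 429 Too Many Requests.
```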
app/cron.py (123 changes)
@@ -5,7 +5,7 @@ from typing import List, Tuple
 
 import arrow
 import requests
-from sqlalchemy import func, desc, or_, and_, nullsfirst
+from sqlalchemy import func, desc, or_, and_
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
@@ -976,6 +976,9 @@ async def _hibp_check(api_key, queue):
             continue
         user = alias.user
         if user.disabled or not user.is_paid():
+            # Mark it as hibp done to skip it as if it had been checked
+            alias.hibp_last_check = arrow.utcnow()
+            Session.commit()
             continue
 
         LOG.d("Checking HIBP for %s", alias)
@@ -1030,6 +1033,60 @@ async def _hibp_check(api_key, queue):
         await asyncio.sleep(rate_sleep)
 
 
+def get_alias_to_check_hibp(
+    oldest_hibp_allowed: arrow.Arrow,
+    user_ids_to_skip: list[int],
+    min_alias_id: int,
+    max_alias_id: int,
+):
+    now = arrow.now()
+    alias_query = (
+        Session.query(Alias)
+        .join(User, User.id == Alias.user_id)
+        .join(Subscription, User.id == Subscription.user_id, isouter=True)
+        .join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
+        .join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
+        .join(
+            CoinbaseSubscription,
+            User.id == CoinbaseSubscription.user_id,
+            isouter=True,
+        )
+        .join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
+        .join(
+            PartnerSubscription,
+            PartnerSubscription.partner_user_id == PartnerUser.id,
+            isouter=True,
+        )
+        .filter(
+            or_(
+                Alias.hibp_last_check.is_(None),
+                Alias.hibp_last_check < oldest_hibp_allowed,
+            ),
+            Alias.user_id.notin_(user_ids_to_skip),
+            Alias.enabled,
+            Alias.id >= min_alias_id,
+            Alias.id < max_alias_id,
+            User.disabled == False,  # noqa: E712
+            or_(
+                User.lifetime,
+                ManualSubscription.end_at > now,
+                Subscription.next_bill_date > now.date(),
+                AppleSubscription.expires_date > now,
+                CoinbaseSubscription.end_at > now,
+                PartnerSubscription.end_at > now,
+            ),
+        )
+    )
+    if config.HIBP_SKIP_PARTNER_ALIAS:
+        alias_query = alias_query.filter(
+            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
+        )
+    for alias in (
+        alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
+    ):
+        yield alias
+
+
 async def check_hibp():
     """
     Check all aliases on the HIBP (Have I Been Pwned) API
@@ -1056,43 +1113,43 @@ async def check_hibp():
     user_ids = [row[0] for row in rows]
     LOG.d("Got %d users to skip" % len(user_ids))
 
-    LOG.d("Preparing list of aliases to check")
+    LOG.d("Checking aliases")
     queue = asyncio.Queue()
-    max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
-    alias_query = Alias.filter(
-        or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date),
-        Alias.user_id.notin_(user_ids),
-        Alias.enabled,
-    )
-    if config.HIBP_SKIP_PARTNER_ALIAS:
-        alias_query = alias_query(Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0)
-    for alias in (
-        alias_query.order_by(nullsfirst(Alias.hibp_last_check.asc()), Alias.id.asc())
-        .yield_per(500)
-        .enable_eagerloads(False)
-    ):
-        await queue.put(alias.id)
-
-    LOG.d("Need to check about %s aliases", queue.qsize())
-
-    # Start one checking process per API key
-    # Each checking process will take one alias from the queue, get the info
-    # and then sleep for 1.5 seconds (due to HIBP API request limits)
-    checkers = []
-    for i in range(len(config.HIBP_API_KEYS)):
-        checker = asyncio.create_task(
-            _hibp_check(
-                config.HIBP_API_KEYS[i],
-                queue,
-            )
-        )
-        checkers.append(checker)
-
-    # Wait until all checking processes are done
-    for checker in checkers:
-        await checker
-
-    LOG.d("Done checking HIBP API for aliases in breaches")
+    min_alias_id = 0
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+    step = 10000
+    now = arrow.now()
+    oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
+    alias_checked = 0
+    for alias_batch_id in range(min_alias_id, max_alias_id, step):
+        for alias in get_alias_to_check_hibp(
+            oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
+        ):
+            await queue.put(alias.id)
+
+        alias_checked += queue.qsize()
+        LOG.d(
+            f"Need to check about {queue.qsize()} aliases in this loop {alias_batch_id}/{max_alias_id}"
+        )
+
+        # Start one checking process per API key
+        # Each checking process will take one alias from the queue, get the info
+        # and then sleep for 1.5 seconds (due to HIBP API request limits)
+        checkers = []
+        for i in range(len(config.HIBP_API_KEYS)):
+            checker = asyncio.create_task(
+                _hibp_check(
+                    config.HIBP_API_KEYS[i],
+                    queue,
+                )
+            )
+            checkers.append(checker)
+
+        # Wait until all checking processes are done
+        for checker in checkers:
+            await checker
+
+    LOG.d(f"Done checking {alias_checked} HIBP API for aliases in breaches")
 
 
 def notify_hibp():
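The net effect of the `check_hibp` changes is that aliases are now pulled in fixed id ranges (`step = 10000`) through the new `get_alias_to_check_hibp` generator, rather than via one table-wide ordered query. A generic sketch of that range-batching pattern, with illustrative names and assuming a SQLAlchemy session and an integer primary key:

```python
# Generic id-range batching sketch (illustrative; not the app's code).
from sqlalchemy import func

STEP = 10_000

def iter_rows_in_id_ranges(session, Model):
    """Yield rows batch by batch so no single query walks the whole table."""
    max_id = session.query(func.max(Model.id)).scalar() or 0
    for batch_start in range(0, max_id + 1, STEP):
        batch = (
            session.query(Model)
            .filter(Model.id >= batch_start, Model.id < batch_start + STEP)
            .order_by(Model.id.asc())
            .enable_eagerloads(False)
            .yield_per(500)  # stream rows instead of materializing the batch
        )
        yield from batch
```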
app/oneshot/emulate_dummy_load.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+import argparse
+import random
+import time
+
+from sqlalchemy import func
+
+from app import config
+from app.models import Alias, Contact
+from app.db import Session
+
+parser = argparse.ArgumentParser(
+    prog=f"Replace {config.NOREPLY}",
+    description=f"Replace {config.NOREPLY} from contacts reply email",
+)
+args = parser.parse_args()
+
+max_alias_id: int = Session.query(func.max(Alias.id)).scalar()
+
+start = time.time()
+tests = 1000
+for i in range(tests):
+    alias = (
+        Alias.filter(Alias.id > int(random.random() * max_alias_id))
+        .order_by(Alias.id.asc())
+        .limit(1)
+        .first()
+    )
+    contact = Contact.filter_by(alias_id=alias.id).order_by(Contact.id.asc()).first()
+    mailboxes = alias.mailboxes
+    user = alias.user
+    if i % 10:
+        print("{i} -> {alias.id}")
+
+end = time.time()
+time_taken = end - start
+print(f"Took {time_taken} -> {time_taken/tests} per test")
@@ -1,29 +1,56 @@
 #!/usr/bin/env python3
 import argparse
+import time
 
-from app.log import LOG
+from sqlalchemy import func
+
 from app.models import Alias, SLDomain
 from app.db import Session
 
 parser = argparse.ArgumentParser(
     prog="Mark partner created aliases with the PARTNER_CREATED flag",
 )
+parser.add_argument(
+    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
+)
+parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
+
 args = parser.parse_args()
+alias_id_start = args.start_alias_id
+max_alias_id = args.end_alias_id
+if max_alias_id == 0:
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+
+print(f"Updating aliases from {alias_id_start} to {max_alias_id}")
 
 domains = SLDomain.filter(SLDomain.partner_id.isnot(None)).all()
+cond = [f"email like '%{domain.domain}'" for domain in domains]
+sql_or_cond = " OR ".join(cond)
+sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})"
+print(sql)
 
-for domain in domains:
-    LOG.i(f"Checking aliases for domain {domain.domain}")
-    for alias in (
-        Alias.filter(
-            Alias.email.like(f"%{domain.domain}"),
-            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0,
-        )
-        .enable_eagerloads(False)
-        .yield_per(100)
-        .all()
-    ):
-        alias.flags = alias.flags | Alias.FLAG_PARTNER_CREATED
-        LOG.i(f" * Updating {alias.email} to {alias.flags}")
-        Session.commit()
+step = 1000
+updated = 0
+start_time = time.time()
+for batch_start in range(alias_id_start, max_alias_id, step):
+    updated += Session.execute(
+        sql,
+        {
+            "start": batch_start,
+            "end": batch_start + step,
+            "flag": Alias.FLAG_PARTNER_CREATED,
+        },
+    ).rowcount
+    elapsed = time.time() - start_time
+    time_per_alias = elapsed / (batch_start - alias_id_start + step)
+    last_batch_id = batch_start + step
+    remaining = max_alias_id - last_batch_id
+    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
+    hours_remaining = time_remaining / 3600.0
+    percent = int(
+        ((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start)
+    )
+    print(
+        f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining"
+    )
+print(f"Updated aliases up to {max_alias_id}")
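The rewritten script above swaps per-row ORM updates (one commit per alias) for one bulk UPDATE per id range. A minimal sketch of that batched-UPDATE pattern with bound parameters, assuming a plain SQLAlchemy engine (table name, flag value, and connection URL are illustrative):

```python
# Batched bulk-UPDATE sketch (illustrative names and URL).
from sqlalchemy import create_engine, text

engine = create_engine("postgresql:///example")  # assumed DSN
FLAG = 2     # illustrative bit flag
STEP = 1000  # rows per batch, as in the script above

with engine.begin() as conn:
    max_id = conn.execute(text("SELECT max(id) FROM alias")).scalar() or 0
    for start in range(0, max_id + 1, STEP):
        res = conn.execute(
            text(
                "UPDATE alias SET flags = flags | :flag "
                "WHERE id >= :start AND id < :end AND flags & :flag = 0"
            ),
            {"flag": FLAG, "start": start, "end": start + STEP},
        )
        print(f"ids [{start}, {start + STEP}): {res.rowcount} updated")
```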
@@ -120,21 +120,6 @@
           </div>
         </div>
         <!-- END WebAuthn -->
-        <!-- Alias import/export -->
-        <div class="card">
-          <div class="card-body">
-            <div class="card-title">Alias import/export</div>
-            <div class="mb-3">
-              You can import your aliases created on other platforms into SimpleLogin.
-              You can also export your aliases to a readable csv format for a future batch import.
-            </div>
-            <a href="{{ url_for('dashboard.batch_import_route') }}"
-               class="btn btn-outline-primary">Batch Import</a>
-            <a href="{{ url_for('dashboard.alias_export_route') }}"
-               class="btn btn-outline-secondary">Export Aliases</a>
-          </div>
-        </div>
-        <!-- END Alias import/export -->
         <!-- data export -->
         <div class="card">
           <div class="card-body">
@@ -559,7 +559,7 @@
           sender address.
           <br />
           If this option is enabled, the original sender addresses is stored in the email header <b>X-SimpleLogin-Envelope-From</b>
-          and the original From header is stored in <b>X-SimpleLogin-Original-From<b>.
+          and the original From header is stored in <b>X-SimpleLogin-Original-From</b>.
           You can choose to display this header in your email client.
           <br />
           As email headers aren't encrypted, your mailbox service can know the sender address via this header.
@@ -583,6 +583,23 @@
         </form>
       </div>
     </div>
+    <!-- Alias import/export -->
+    <div class="card">
+      <div class="card-body">
+        <div class="card-title">
+          Alias import/export
+        </div>
+        <div class="mb-3">
+          You can import your aliases created on other platforms into SimpleLogin.
+          You can also export your aliases to a readable csv format for a future batch import.
+        </div>
+        <a href="{{ url_for('dashboard.batch_import_route') }}"
+           class="btn btn-outline-primary">Batch Import</a>
+        <a href="{{ url_for('dashboard.alias_export_route') }}"
+           class="btn btn-outline-secondary">Export Aliases</a>
+      </div>
+    </div>
+    <!-- END Alias import/export -->
   </div>
 {% endblock %}
 {% block script %}
app/tests/cron/__init__.py (new file, empty)

app/tests/cron/test_get_alias_for_hibp.py (new file, 116 lines)
@@ -0,0 +1,116 @@
+import arrow
+import pytest
+
+import cron
+from app.db import Session
+from app.models import (
+    Alias,
+    AppleSubscription,
+    PlanEnum,
+    CoinbaseSubscription,
+    ManualSubscription,
+    Subscription,
+    PartnerUser,
+    PartnerSubscription,
+    User,
+)
+from app.proton.utils import get_proton_partner
+from tests.utils import create_new_user, random_token
+
+
+def test_get_alias_for_free_user_has_no_alias():
+    user = create_new_user()
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_get_alias_for_lifetime():
+    user = create_new_user()
+    user.lifetime = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def create_partner_sub(user: User):
+    pu = PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        partner_email=user.email,
+        external_user_id=random_token(10),
+        user_id=user.id,
+        flush=True,
+    )
+    PartnerSubscription.create(
+        partner_user_id=pu.id, end_at=arrow.utcnow().shift(days=15)
+    )
+
+
+sub_generator_list = [
+    lambda u: AppleSubscription.create(
+        user_id=u.id,
+        expires_date=arrow.now().shift(days=15),
+        original_transaction_id=random_token(10),
+        receipt_data=random_token(10),
+        plan=PlanEnum.monthly,
+    ),
+    lambda u: CoinbaseSubscription.create(
+        user_id=u.id,
+        end_at=arrow.now().shift(days=15),
+    ),
+    lambda u: ManualSubscription.create(
+        user_id=u.id,
+        end_at=arrow.now().shift(days=15),
+    ),
+    lambda u: Subscription.create(
+        user_id=u.id,
+        cancel_url="",
+        update_url="",
+        subscription_id=random_token(10),
+        event_time=arrow.now(),
+        next_bill_date=arrow.now().shift(days=15).date(),
+        plan=PlanEnum.monthly,
+    ),
+    create_partner_sub,
+]
+
+
+@pytest.mark.parametrize("sub_generator", sub_generator_list)
+def test_get_alias_for_sub(sub_generator):
+    user = create_new_user()
+    sub_generator(user)
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def test_disabled_user_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    user.disabled = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_skipped_user_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0