Compare commits

3 commits: 78184eeae4, c111fbe8e1, d5981588e4
app/.github/workflows/main.yml (vendored, 10 lines changed)
@@ -1,7 +1,6 @@
 name: Test and lint
 
-on:
-  push:
+on: [push, pull_request]
 
 jobs:
   lint:
@@ -139,6 +138,12 @@ jobs:
         with:
           fetch-depth: 0
 
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
       - name: Create Sentry release
         uses: getsentry/action-release@v1
         env:
@@ -158,6 +163,7 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: .
+          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.meta.outputs.tags }}
 
@@ -151,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
 
 ## Pull request
 
-The code is formatted using https://github.com/psf/black, to format the code, simply run
+The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
 
 ```
-poetry run black .
+poetry run ruff format .
 ```
 
 The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by
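The formatter swap is a drop-in at the command level: `poetry run ruff format .` replaces `poetry run black .`. Ruff can also cover most of the `flake8` checks referenced in the same section (via `ruff check`), though this diff leaves the flake8 instructions untouched.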
@@ -434,6 +434,8 @@ HIBP_MAX_ALIAS_CHECK = 10_000
 HIBP_RPM = 100
 HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
 
+KEEP_OLD_DATA_DAYS = 30
+
 POSTMASTER = os.environ.get("POSTMASTER")
 
 # store temporary files, especially for debugging
@@ -2,10 +2,12 @@ from app.dashboard.base import dashboard_bp
 from flask_login import login_required, current_user
 from app.alias_utils import alias_export_csv
 from app.dashboard.views.enter_sudo import sudo_required
+from app.extensions import limiter
 
 
 @dashboard_bp.route("/alias_export", methods=["GET"])
 @login_required
 @sudo_required
+@limiter.limit("2/minute")
 def alias_export_route():
     return alias_export_csv(current_user)
@@ -7,6 +7,7 @@ from app.config import JOB_BATCH_IMPORT
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
+from app.extensions import limiter
 from app.log import LOG
 from app.models import File, BatchImport, Job
 from app.utils import random_string, CSRFValidationForm
@@ -15,6 +16,7 @@ from app.utils import random_string, CSRFValidationForm
 @dashboard_bp.route("/batch_import", methods=["GET", "POST"])
 @login_required
 @sudo_required
+@limiter.limit("10/minute", methods=["POST"])
 def batch_import_route():
     # only for users who have custom domains
     if not current_user.verified_custom_domains():
@@ -39,7 +41,7 @@ def batch_import_route():
         return redirect(request.url)
     if len(batch_imports) > 10:
         flash(
-            "You have too many imports already. Wait until some get cleaned up",
+            "You have too many imports already. Please wait until some get cleaned up",
             "error",
         )
         return render_template(
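Both dashboard routes above gain a flask-limiter decorator. A minimal sketch of how such decorators behave, assuming the project's `app.extensions.limiter` is a standard `flask_limiter.Limiter` (the route bodies here are placeholders, not the project's code):

```python
from flask import Flask
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address

app = Flask(__name__)
# flask-limiter >= 3 style; older versions take Limiter(app, key_func=...)
limiter = Limiter(get_remote_address, app=app)

@app.route("/alias_export")
@limiter.limit("2/minute")  # a third request within the same minute gets HTTP 429
def alias_export():
    return "exported"

@app.route("/batch_import", methods=["GET", "POST"])
@limiter.limit("10/minute", methods=["POST"])  # only POSTs count against the limit
def batch_import():
    return "imported"
```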
@@ -21,6 +21,7 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
 LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
 RETURN_PATH = "Return-Path"
 AUTHENTICATION_RESULTS = "Authentication-Results"
+SL_QUEUE_ID = "X-SL-Queue-Id"
 
 # headers used to DKIM sign in order of preference
 DKIM_HEADERS = [
@@ -494,9 +494,10 @@ def delete_header(msg: Message, header: str):
 
 def sanitize_header(msg: Message, header: str):
     """remove trailing space and remove linebreak from a header"""
+    header_lowercase = header.lower()
     for i in reversed(range(len(msg._headers))):
         header_name = msg._headers[i][0].lower()
-        if header_name == header.lower():
+        if header_name == header_lowercase:
             # msg._headers[i] is a tuple like ('From', 'hey@google.com')
             if msg._headers[i][1]:
                 msg._headers[i] = (
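The change hoists `header.lower()` out of the loop so it is computed once rather than per stored header. As a standalone illustration of why the sanitizing itself matters (a sketch, not the project's helper): a CR/LF smuggled into a header value would otherwise start a new header line when the message is serialized.

```python
from email.message import Message

msg = Message()
msg["Subject"] = "hello"
# simulate a value carrying a header-injection attempt
msg._headers[0] = ("Subject", "hello\r\nBcc: attacker@evil.test")

# flatten linebreaks the way a sanitizer would
for i, (name, value) in enumerate(msg._headers):
    if value:
        msg._headers[i] = (name, value.replace("\r", " ").replace("\n", " "))

print(msg["Subject"])  # one flattened header value, no injected Bcc line
```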
@@ -30,7 +30,9 @@ def apply_dmarc_policy_for_forward_phase(
 ) -> Tuple[Message, Optional[str]]:
     spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
     if not DMARC_CHECK_ENABLED or not spam_result:
+        LOG.i("DMARC check disabled")
         return msg, None
+    LOG.i(f"Spam check result in {spam_result}")
 
     from_header = get_header_unicode(msg[headers.FROM])
 
@@ -150,8 +152,10 @@ def apply_dmarc_policy_for_reply_phase(
 ) -> Optional[str]:
     spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
     if not DMARC_CHECK_ENABLED or not spam_result:
+        LOG.i("DMARC check disabled")
         return None
 
+    LOG.i(f"Spam check result is {spam_result}")
     if spam_result.dmarc not in (
         DmarcCheckResult.quarantine,
         DmarcCheckResult.reject,
@@ -30,7 +30,10 @@ def handle_batch_import(batch_import: BatchImport):
 
     LOG.d("Download file %s from %s", batch_import.file, file_url)
     r = requests.get(file_url)
-    lines = [line.decode("utf-8") for line in r.iter_lines()]
+    # Replace invisible character
+    lines = [
+        line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
+    ]
 
     import_from_csv(batch_import, user, lines)
 
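The import handler now strips the UTF-8 byte-order mark. Files exported from Windows tools often start with a BOM, which decodes to the invisible `\ufeff` and would otherwise end up glued to the first CSV column name. A quick illustration:

```python
raw = "\ufeffalias,note\r\n".encode("utf-8")  # what a BOM-prefixed upload looks like
assert raw.startswith(b"\xef\xbb\xbf")        # the BOM bytes on the wire

line = raw.decode("utf-8").replace("\ufeff", "").strip()
assert line == "alias,note"
```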
@@ -5,19 +5,9 @@ from typing import Optional
 import boto3
 import requests
 
-from app.config import (
-    AWS_REGION,
-    BUCKET,
-    AWS_ACCESS_KEY_ID,
-    AWS_SECRET_ACCESS_KEY,
-    LOCAL_FILE_UPLOAD,
-    UPLOAD_DIR,
-    URL,
-    AWS_ENDPOINT_URL,
-)
+from app import config
 from app.log import LOG
 
 
 _s3_client = None
 
 
@@ -25,12 +15,12 @@ def _get_s3client():
     global _s3_client
     if _s3_client is None:
         args = {
-            "aws_access_key_id": AWS_ACCESS_KEY_ID,
-            "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
-            "region_name": AWS_REGION,
+            "aws_access_key_id": config.AWS_ACCESS_KEY_ID,
+            "aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
+            "region_name": config.AWS_REGION,
         }
-        if AWS_ENDPOINT_URL:
-            args["endpoint_url"] = AWS_ENDPOINT_URL
+        if config.AWS_ENDPOINT_URL:
+            args["endpoint_url"] = config.AWS_ENDPOINT_URL
         _s3_client = boto3.client("s3", **args)
     return _s3_client
 
@@ -38,8 +28,8 @@ def _get_s3client():
 def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
     bs.seek(0)
 
-    if LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(UPLOAD_DIR, key)
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, key)
         file_dir = os.path.dirname(file_path)
         os.makedirs(file_dir, exist_ok=True)
         with open(file_path, "wb") as f:
@@ -47,7 +37,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
 
     else:
         _get_s3client().put_object(
-            Bucket=BUCKET,
+            Bucket=config.BUCKET,
             Key=key,
             Body=bs,
             ContentType=content_type,
@@ -57,8 +47,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
 def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
     bs.seek(0)
 
-    if LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(UPLOAD_DIR, path)
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, path)
         file_dir = os.path.dirname(file_path)
         os.makedirs(file_dir, exist_ok=True)
         with open(file_path, "wb") as f:
@@ -66,7 +56,7 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 
     else:
         _get_s3client().put_object(
-            Bucket=BUCKET,
+            Bucket=config.BUCKET,
             Key=path,
             Body=bs,
             # Support saving a remote file using Http header
@@ -77,12 +67,12 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 
 
 def download_email(path: str) -> Optional[str]:
-    if LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(UPLOAD_DIR, path)
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, path)
         with open(file_path, "rb") as f:
             return f.read()
     resp = _get_s3client().get_object(
-        Bucket=BUCKET,
+        Bucket=config.BUCKET,
         Key=path,
     )
     if not resp or "Body" not in resp:
@@ -96,29 +86,30 @@ def upload_from_url(url: str, upload_path):
 
 
 def get_url(key: str, expires_in=3600) -> str:
-    if LOCAL_FILE_UPLOAD:
-        return URL + "/static/upload/" + key
+    if config.LOCAL_FILE_UPLOAD:
+        return config.URL + "/static/upload/" + key
     else:
         return _get_s3client().generate_presigned_url(
             ExpiresIn=expires_in,
             ClientMethod="get_object",
-            Params={"Bucket": BUCKET, "Key": key},
+            Params={"Bucket": config.BUCKET, "Key": key},
         )
 
 
 def delete(path: str):
-    if LOCAL_FILE_UPLOAD:
-        os.remove(os.path.join(UPLOAD_DIR, path))
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, path)
+        os.remove(file_path)
     else:
-        _get_s3client().delete_object(Bucket=BUCKET, Key=path)
+        _get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
 
 
 def create_bucket_if_not_exists():
     s3client = _get_s3client()
     buckets = s3client.list_buckets()
     for bucket in buckets["Buckets"]:
-        if bucket["Name"] == BUCKET:
+        if bucket["Name"] == config.BUCKET:
             LOG.i("Bucket already exists")
             return
-    s3client.create_bucket(Bucket=BUCKET)
-    LOG.i(f"Bucket {BUCKET} created")
+    s3client.create_bucket(Bucket=config.BUCKET)
+    LOG.i(f"Bucket {config.BUCKET} created")
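The whole of this file switches from importing individual names out of `app.config` to importing the module and reading attributes at call time. The practical payoff is testability: a value copied at import time is frozen, while an attribute lookup sees later reassignments, which is exactly what the new `test_cleanup_old_imports` below relies on when it sets `config.UPLOAD_DIR = tmpdir`. A minimal sketch of the difference, with illustrative names:

```python
import types

# stand-in for the app.config module
config = types.SimpleNamespace(LOCAL_FILE_UPLOAD=False)

def backend():
    # reads config at call time, so a test can flip the flag afterwards
    return "local" if config.LOCAL_FILE_UPLOAD else "s3"

assert backend() == "s3"
config.LOCAL_FILE_UPLOAD = True  # what a test fixture would do
assert backend() == "local"
```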
app/cron.py (103 lines changed)
@@ -5,7 +5,7 @@ from typing import List, Tuple
 
 import arrow
 import requests
-from sqlalchemy import func, desc, or_, and_, nullsfirst
+from sqlalchemy import func, desc, or_, and_
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
@@ -61,6 +61,9 @@ from app.pgp_utils import load_public_key_and_check, PGPException
 from app.proton.utils import get_proton_partner
 from app.utils import sanitize_email
 from server import create_light_app
+from tasks.cleanup_old_imports import cleanup_old_imports
+from tasks.cleanup_old_jobs import cleanup_old_jobs
+from tasks.cleanup_old_notifications import cleanup_old_notifications
 
 DELETE_GRACE_DAYS = 30
 
@@ -976,6 +979,9 @@ async def _hibp_check(api_key, queue):
             continue
         user = alias.user
         if user.disabled or not user.is_paid():
+            # Mark it as hibp done to skip it as if it had been checked
+            alias.hibp_last_check = arrow.utcnow()
+            Session.commit()
             continue
 
         LOG.d("Checking HIBP for %s", alias)
@@ -1030,6 +1036,60 @@ async def _hibp_check(api_key, queue):
         await asyncio.sleep(rate_sleep)
 
 
+def get_alias_to_check_hibp(
+    oldest_hibp_allowed: arrow.Arrow,
+    user_ids_to_skip: list[int],
+    min_alias_id: int,
+    max_alias_id: int,
+):
+    now = arrow.now()
+    alias_query = (
+        Session.query(Alias)
+        .join(User, User.id == Alias.user_id)
+        .join(Subscription, User.id == Subscription.user_id, isouter=True)
+        .join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
+        .join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
+        .join(
+            CoinbaseSubscription,
+            User.id == CoinbaseSubscription.user_id,
+            isouter=True,
+        )
+        .join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
+        .join(
+            PartnerSubscription,
+            PartnerSubscription.partner_user_id == PartnerUser.id,
+            isouter=True,
+        )
+        .filter(
+            or_(
+                Alias.hibp_last_check.is_(None),
+                Alias.hibp_last_check < oldest_hibp_allowed,
+            ),
+            Alias.user_id.notin_(user_ids_to_skip),
+            Alias.enabled,
+            Alias.id >= min_alias_id,
+            Alias.id < max_alias_id,
+            User.disabled == False,  # noqa: E712
+            or_(
+                User.lifetime,
+                ManualSubscription.end_at > now,
+                Subscription.next_bill_date > now.date(),
+                AppleSubscription.expires_date > now,
+                CoinbaseSubscription.end_at > now,
+                PartnerSubscription.end_at > now,
+            ),
+        )
+    )
+    if config.HIBP_SKIP_PARTNER_ALIAS:
+        alias_query = alias_query.filter(
+            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
+        )
+    for alias in (
+        alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
+    ):
+        yield alias
+
+
 async def check_hibp():
     """
     Check all aliases on the HIBP (Have I Been Pwned) API
@@ -1056,24 +1116,24 @@ async def check_hibp():
     user_ids = [row[0] for row in rows]
     LOG.d("Got %d users to skip" % len(user_ids))
 
-    LOG.d("Preparing list of aliases to check")
+    LOG.d("Checking aliases")
     queue = asyncio.Queue()
-    max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
-    alias_query = Alias.filter(
-        or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date),
-        Alias.user_id.notin_(user_ids),
-        Alias.enabled,
-    )
-    if config.HIBP_SKIP_PARTNER_ALIAS:
-        alias_query = alias_query(Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0)
-    for alias in (
-        alias_query.order_by(nullsfirst(Alias.hibp_last_check.asc()), Alias.id.asc())
-        .yield_per(500)
-        .enable_eagerloads(False)
-    ):
-        await queue.put(alias.id)
-
-    LOG.d("Need to check about %s aliases", queue.qsize())
+    min_alias_id = 0
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+    step = 10000
+    now = arrow.now()
+    oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
+    alias_checked = 0
+    for alias_batch_id in range(min_alias_id, max_alias_id, step):
+        for alias in get_alias_to_check_hibp(
+            oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
+        ):
+            await queue.put(alias.id)
+
+        alias_checked += queue.qsize()
+        LOG.d(
+            f"Need to check about {queue.qsize()} aliases in this loop {alias_batch_id}/{max_alias_id}"
+        )
 
     # Start one checking process per API key
     # Each checking process will take one alias from the queue, get the info
@@ -1092,7 +1152,7 @@ async def check_hibp():
     for checker in checkers:
         await checker
 
-    LOG.d("Done checking HIBP API for aliases in breaches")
+    LOG.d(f"Done checking {alias_checked} HIBP API for aliases in breaches")
 
 
 def notify_hibp():
@@ -1164,6 +1224,13 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
     Session.commit()
 
 
+def delete_old_data():
+    oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
+    cleanup_old_imports(oldest_valid)
+    cleanup_old_jobs(oldest_valid)
+    cleanup_old_notifications(oldest_valid)
+
+
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()
@@ -1178,6 +1245,7 @@ if __name__ == "__main__":
             "notify_manual_subscription_end",
             "notify_premium_end",
             "delete_logs",
+            "delete_old_data",
            "poll_apple_subscription",
             "sanity_check",
             "delete_old_monitoring",
@@ -1206,6 +1274,9 @@ if __name__ == "__main__":
     elif args.job == "delete_logs":
         LOG.d("Deleted Logs")
         delete_logs()
+    elif args.job == "delete_old_data":
+        LOG.d("Delete old data")
+        delete_old_data()
     elif args.job == "poll_apple_subscription":
         LOG.d("Poll Apple Subscriptions")
         poll_apple_subscription()
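The HIBP scan is restructured from one unbounded, `nullsfirst`-ordered query into fixed id-range batches of 10,000, with the eligibility rules (enabled alias, not recently checked, user on a paid or lifetime plan) pushed down into `get_alias_to_check_hibp`. Reduced to its skeleton (illustrative names, not the project's code), the batching pattern looks like this:

```python
def scan_in_batches(min_id: int, max_id: int, step: int, fetch):
    """Walk [min_id, max_id) in fixed windows so each query stays bounded
    instead of materializing one huge result set."""
    for batch_start in range(min_id, max_id, step):
        # fetch() stands in for get_alias_to_check_hibp(...)
        yield from fetch(batch_start, batch_start + step)
```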
@@ -37,6 +37,12 @@ jobs:
     schedule: "15 5 * * *"
     captureStderr: true
 
+  - name: SimpleLogin Delete Old data
+    command: python /code/cron.py -j delete_old_data
+    shell: /bin/bash
+    schedule: "30 5 * * *"
+    captureStderr: true
+
   - name: SimpleLogin Poll Apple Subscriptions
     command: python /code/cron.py -j poll_apple_subscription
     shell: /bin/bash
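The new entry follows the same five-field cron syntax as its neighbors: `30 5 * * *` fires once a day at 05:30, fifteen minutes after the job scheduled at `15 5 * * *`, so the daily jobs are staggered rather than started concurrently.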
@@ -875,6 +875,7 @@ def forward_email_to_mailbox(
         # References and In-Reply-To are used for keeping the email thread
         headers.REFERENCES,
         headers.IN_REPLY_TO,
+        headers.SL_QUEUE_ID,
         headers.LIST_UNSUBSCRIBE,
         headers.LIST_UNSUBSCRIBE_POST,
     ] + headers.MIME_HEADERS
@@ -2040,10 +2041,11 @@ def handle(envelope: Envelope, msg: Message) -> str:
         return status.E204
 
     # sanitize email headers
-    sanitize_header(msg, "from")
-    sanitize_header(msg, "to")
-    sanitize_header(msg, "cc")
-    sanitize_header(msg, "reply-to")
+    sanitize_header(msg, headers.FROM)
+    sanitize_header(msg, headers.TO)
+    sanitize_header(msg, headers.CC)
+    sanitize_header(msg, headers.REPLY_TO)
+    sanitize_header(msg, headers.MESSAGE_ID)
 
     LOG.d(
         "==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "
app/oneshot/emulate_dummy_load.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+import argparse
+import random
+import time
+
+from sqlalchemy import func
+
+from app import config
+from app.models import Alias, Contact
+from app.db import Session
+
+parser = argparse.ArgumentParser(
+    prog=f"Replace {config.NOREPLY}",
+    description=f"Replace {config.NOREPLY} from contacts reply email",
+)
+args = parser.parse_args()
+
+max_alias_id: int = Session.query(func.max(Alias.id)).scalar()
+
+start = time.time()
+tests = 1000
+for i in range(tests):
+    alias = (
+        Alias.filter(Alias.id > int(random.random() * max_alias_id))
+        .order_by(Alias.id.asc())
+        .limit(1)
+        .first()
+    )
+    contact = Contact.filter_by(alias_id=alias.id).order_by(Contact.id.asc()).first()
+    mailboxes = alias.mailboxes
+    user = alias.user
+    if i % 10:
+        print("{i} -> {alias.id}")
+
+end = time.time()
+time_taken = end - start
+print(f"Took {time_taken} -> {time_taken/tests} per test")
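One thing worth flagging in this new script: `print("{i} -> {alias.id}")` is missing the `f` prefix, so it prints the literal braces rather than the loop counter and alias id. Harmless for a load-emulation one-shot, but the progress output is not what the string suggests.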
@@ -1,29 +1,56 @@
 #!/usr/bin/env python3
 import argparse
+import time
+
+from sqlalchemy import func
 
-from app.log import LOG
 from app.models import Alias, SLDomain
 from app.db import Session
 
 parser = argparse.ArgumentParser(
     prog="Mark partner created aliases with the PARTNER_CREATED flag",
 )
+parser.add_argument(
+    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
+)
+parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
+
 args = parser.parse_args()
+alias_id_start = args.start_alias_id
+max_alias_id = args.end_alias_id
+if max_alias_id == 0:
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+
+print(f"Updating aliases from {alias_id_start} to {max_alias_id}")
 
 domains = SLDomain.filter(SLDomain.partner_id.isnot(None)).all()
+cond = [f"email like '%{domain.domain}'" for domain in domains]
+sql_or_cond = " OR ".join(cond)
+sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})"
+print(sql)
 
-for domain in domains:
-    LOG.i(f"Checking aliases for domain {domain.domain}")
-    for alias in (
-        Alias.filter(
-            Alias.email.like(f"%{domain.domain}"),
-            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0,
-        )
-        .enable_eagerloads(False)
-        .yield_per(100)
-        .all()
-    ):
-        alias.flags = alias.flags | Alias.FLAG_PARTNER_CREATED
-        LOG.i(f" * Updating {alias.email} to {alias.flags}")
-        Session.commit()
+step = 1000
+updated = 0
+start_time = time.time()
+for batch_start in range(alias_id_start, max_alias_id, step):
+    updated += Session.execute(
+        sql,
+        {
+            "start": batch_start,
+            "end": batch_start + step,
+            "flag": Alias.FLAG_PARTNER_CREATED,
+        },
+    ).rowcount
+    elapsed = time.time() - start_time
+    time_per_alias = elapsed / (batch_start - alias_id_start + step)
+    last_batch_id = batch_start + step
+    remaining = max_alias_id - last_batch_id
+    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
+    hours_remaining = time_remaining / 3600.0
+    percent = int(
+        ((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start)
+    )
+    print(
+        f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining"
+    )
+print(f"Updated aliases up to {max_alias_id}")
app/tasks/__init__.py (new file, empty)
app/tasks/cleanup_old_imports.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+import arrow
+
+from app import s3
+from app.log import LOG
+from app.models import BatchImport
+
+
+def cleanup_old_imports(oldest_allowed: arrow.Arrow):
+    LOG.i(f"Deleting imports older than {oldest_allowed}")
+    for batch_import in (
+        BatchImport.filter(BatchImport.created_at < oldest_allowed).yield_per(500).all()
+    ):
+        LOG.i(
+            f"Deleting batch import {batch_import} with file {batch_import.file.path}"
+        )
+        file = batch_import.file
+        if file is not None:
+            s3.delete(file.path)
+        BatchImport.delete(batch_import.id, commit=True)
app/tasks/cleanup_old_jobs.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+import arrow
+from sqlalchemy import or_, and_
+
+from app import config
+from app.db import Session
+from app.log import LOG
+from app.models import Job, JobState
+
+
+def cleanup_old_jobs(oldest_allowed: arrow.Arrow):
+    LOG.i(f"Deleting jobs older than {oldest_allowed}")
+    count = Job.filter(
+        or_(
+            Job.state == JobState.done.value,
+            Job.state == JobState.error.value,
+            and_(
+                Job.state == JobState.taken.value,
+                Job.attempts >= config.JOB_MAX_ATTEMPTS,
+            ),
+        ),
+        Job.updated_at < oldest_allowed,
+    ).delete()
+    Session.commit()
+    LOG.i(f"Deleted {count} jobs")
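Note the deletion predicate: `done` and `error` jobs are removed once older than the cutoff, but `taken` jobs only when they have also exhausted `JOB_MAX_ATTEMPTS`, so a job still within its retry budget survives the sweep.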
app/tasks/cleanup_old_notifications.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+import arrow
+
+from app.db import Session
+from app.log import LOG
+from app.models import Notification
+
+
+def cleanup_old_notifications(oldest_allowed: arrow.Arrow):
+    LOG.i(f"Deleting notifications older than {oldest_allowed}")
+    count = Notification.filter(Notification.created_at < oldest_allowed).delete()
+    Session.commit()
+    LOG.i(f"Deleted {count} notifications")
@@ -120,21 +120,6 @@
         </div>
     </div>
     <!-- END WebAuthn -->
-    <!-- Alias import/export -->
-    <div class="card">
-        <div class="card-body">
-            <div class="card-title">Alias import/export</div>
-            <div class="mb-3">
-                You can import your aliases created on other platforms into SimpleLogin.
-                You can also export your aliases to a readable csv format for a future batch import.
-            </div>
-            <a href="{{ url_for('dashboard.batch_import_route') }}"
-               class="btn btn-outline-primary">Batch Import</a>
-            <a href="{{ url_for('dashboard.alias_export_route') }}"
-               class="btn btn-outline-secondary">Export Aliases</a>
-        </div>
-    </div>
-    <!-- END Alias import/export -->
     <!-- data export -->
     <div class="card">
         <div class="card-body">
@@ -559,7 +559,7 @@
                 sender address.
                 <br />
                 If this option is enabled, the original sender addresses is stored in the email header <b>X-SimpleLogin-Envelope-From</b>
-                and the original From header is stored in <b>X-SimpleLogin-Original-From<b>.
+                and the original From header is stored in <b>X-SimpleLogin-Original-From</b>.
                 You can choose to display this header in your email client.
                 <br />
                 As email headers aren't encrypted, your mailbox service can know the sender address via this header.
@@ -583,6 +583,23 @@
             </form>
         </div>
     </div>
+    <!-- Alias import/export -->
+    <div class="card">
+        <div class="card-body">
+            <div class="card-title">
+                Alias import/export
+            </div>
+            <div class="mb-3">
+                You can import your aliases created on other platforms into SimpleLogin.
+                You can also export your aliases to a readable csv format for a future batch import.
+            </div>
+            <a href="{{ url_for('dashboard.batch_import_route') }}"
+               class="btn btn-outline-primary">Batch Import</a>
+            <a href="{{ url_for('dashboard.alias_export_route') }}"
+               class="btn btn-outline-secondary">Export Aliases</a>
+        </div>
+    </div>
+    <!-- END Alias import/export -->
 </div>
 {% endblock %}
 {% block script %}
app/tests/cron/__init__.py (new file, empty)
app/tests/cron/test_get_alias_for_hibp.py (new file, 142 lines)
@@ -0,0 +1,142 @@
+import arrow
+import pytest
+
+import cron
+from app.db import Session
+from app.models import (
+    Alias,
+    AppleSubscription,
+    PlanEnum,
+    CoinbaseSubscription,
+    ManualSubscription,
+    Subscription,
+    PartnerUser,
+    PartnerSubscription,
+    User,
+)
+from app.proton.utils import get_proton_partner
+from tests.utils import create_new_user, random_token
+
+
+def test_get_alias_for_free_user_has_no_alias():
+    user = create_new_user()
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_get_alias_for_lifetime_with_null_hibp_date():
+    user = create_new_user()
+    user.lifetime = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def test_get_alias_for_lifetime_with_old_hibp_date():
+    user = create_new_user()
+    user.lifetime = True
+    alias = Alias.create_new_random(user)
+    alias.hibp_last_check = arrow.now().shift(days=-1)
+    alias_id = alias.id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def create_partner_sub(user: User):
+    pu = PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        partner_email=user.email,
+        external_user_id=random_token(10),
+        user_id=user.id,
+        flush=True,
+    )
+    PartnerSubscription.create(
+        partner_user_id=pu.id, end_at=arrow.utcnow().shift(days=15)
+    )
+
+
+sub_generator_list = [
+    lambda u: AppleSubscription.create(
+        user_id=u.id,
+        expires_date=arrow.now().shift(days=15),
+        original_transaction_id=random_token(10),
+        receipt_data=random_token(10),
+        plan=PlanEnum.monthly,
+    ),
+    lambda u: CoinbaseSubscription.create(
+        user_id=u.id,
+        end_at=arrow.now().shift(days=15),
+    ),
+    lambda u: ManualSubscription.create(
+        user_id=u.id,
+        end_at=arrow.now().shift(days=15),
+    ),
+    lambda u: Subscription.create(
+        user_id=u.id,
+        cancel_url="",
+        update_url="",
+        subscription_id=random_token(10),
+        event_time=arrow.now(),
+        next_bill_date=arrow.now().shift(days=15).date(),
+        plan=PlanEnum.monthly,
+    ),
+    create_partner_sub,
+]
+
+
+@pytest.mark.parametrize("sub_generator", sub_generator_list)
+def test_get_alias_for_sub(sub_generator):
+    user = create_new_user()
+    sub_generator(user)
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
+def test_disabled_user_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    user.disabled = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_skipped_user_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    alias_id = Alias.create_new_random(user).id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
+
+
+def test_already_checked_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    alias = Alias.create_new_random(user)
+    alias.hibp_last_check = arrow.now().shift(days=1)
+    alias_id = alias.id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
app/tests/tasks/__init__.py (new file, empty)
app/tests/tasks/test_cleanup_old_imports.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+import tempfile
+from io import BytesIO
+
+import arrow
+
+from app import s3, config
+from app.models import File, BatchImport
+from tasks.cleanup_old_imports import cleanup_old_imports
+from tests.utils import random_token, create_new_user
+
+
+def test_cleanup_old_imports():
+    BatchImport.filter().delete()
+    with tempfile.TemporaryDirectory() as tmpdir:
+        config.UPLOAD_DIR = tmpdir
+        user = create_new_user()
+        path = random_token()
+        s3.upload_from_bytesio(path, BytesIO("data".encode("utf-8")))
+        file = File.create(path=path, commit=True)  # noqa: F821
+        now = arrow.now()
+        delete_batch_import_id = BatchImport.create(
+            user_id=user.id,
+            file_id=file.id,
+            created_at=now.shift(minutes=-1),
+            flush=True,
+        ).id
+        keep_batch_import_id = BatchImport.create(
+            user_id=user.id,
+            file_id=file.id,
+            created_at=now.shift(minutes=+1),
+            commit=True,
+        ).id
+        cleanup_old_imports(now)
+        assert BatchImport.get(id=delete_batch_import_id) is None
+        assert BatchImport.get(id=keep_batch_import_id) is not None
app/tests/tasks/test_cleanup_old_jobs.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+import arrow
+
+from app import config
+from app.models import Job, JobState
+from tasks.cleanup_old_jobs import cleanup_old_jobs
+
+
+def test_cleanup_old_jobs():
+    Job.filter().delete()
+    now = arrow.now()
+    delete_ids = [
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.done.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.error.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.taken.value,
+            attempts=config.JOB_MAX_ATTEMPTS,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+    ]
+
+    keep_ids = [
+        Job.create(
+            updated_at=now.shift(minutes=+1),
+            state=JobState.done.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=+1),
+            state=JobState.error.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=+1),
+            state=JobState.taken.value,
+            attempts=config.JOB_MAX_ATTEMPTS,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.taken.value,
+            attempts=config.JOB_MAX_ATTEMPTS - 1,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+    ]
+    cleanup_old_jobs(now)
+    for delete_id in delete_ids:
+        assert Job.get(id=delete_id) is None
+    for keep_id in keep_ids:
+        assert Job.get(id=keep_id) is not None
app/tests/tasks/test_cleanup_old_notifications.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+import arrow
+
+from app.models import Notification
+from tasks.cleanup_old_notifications import cleanup_old_notifications
+from tests.utils import create_new_user
+
+
+def test_cleanup_old_notifications():
+    Notification.filter().delete()
+    user = create_new_user()
+    now = arrow.now()
+    delete_id = Notification.create(
+        user_id=user.id,
+        created_at=now.shift(minutes=-1),
+        message="",
+        flush=True,
+    ).id
+    keep_id = Notification.create(
+        user_id=user.id,
+        created_at=now.shift(minutes=+1),
+        message="",
+        flush=True,
+    ).id
+    cleanup_old_notifications(now)
+    assert Notification.get(id=delete_id) is None
+    assert Notification.get(id=keep_id) is not None
@@ -384,3 +384,30 @@ def test_break_loop_alias_as_mailbox(flask_client):
     msg[headers.SUBJECT] = random_string()
     result = email_handler.handle(envelope, msg)
     assert result == status.E525
+
+
+@mail_sender.store_emails_test_decorator
+def test_preserve_headers(flask_client):
+    headers_to_keep = [
+        headers.SUBJECT,
+        headers.DATE,
+        headers.MESSAGE_ID,
+        headers.REFERENCES,
+        headers.IN_REPLY_TO,
+        headers.SL_QUEUE_ID,
+    ] + headers.MIME_HEADERS
+    user = create_new_user()
+    alias = Alias.create_new_random(user)
+    envelope = Envelope()
+    envelope.mail_from = "somewhere@lo.cal"
+    envelope.rcpt_tos = [alias.email]
+    msg = EmailMessage()
+    for header in headers_to_keep:
+        msg[header] = header + "keep"
+    result = email_handler.handle(envelope, msg)
+    assert result == status.E200
+    sent_mails = mail_sender.get_stored_emails()
+    assert len(sent_mails) == 1
+    msg = sent_mails[0].msg
+    for header in headers_to_keep:
+        assert msg[header] == header + "keep"