Compare commits

3 commits: 24ba25ab6a, 78184eeae4, c111fbe8e1

```diff
@@ -308,28 +308,29 @@ def delete_alias(alias: Alias, user: User):
     Delete an alias and add it to either global or domain trash
     Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
     """
+    LOG.i(f"User {user} has deleted alias {alias}")
     # save deleted alias to either global or domain trash
     if alias.custom_domain_id:
         if not DomainDeletedAlias.get_by(
             email=alias.email, domain_id=alias.custom_domain_id
         ):
-            LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
-            Session.add(
-                DomainDeletedAlias(
-                    user_id=user.id,
-                    email=alias.email,
-                    domain_id=alias.custom_domain_id,
-                )
-            )
+            domain_deleted_alias = DomainDeletedAlias(
+                user_id=user.id,
+                email=alias.email,
+                domain_id=alias.custom_domain_id,
+            )
+            Session.add(domain_deleted_alias)
             Session.commit()
+
+            LOG.i(
+                f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
+            )
     else:
         if not DeletedAlias.get_by(email=alias.email):
-            LOG.d("add %s to global trash", alias)
-            Session.add(DeletedAlias(email=alias.email))
+            deleted_alias = DeletedAlias(email=alias.email)
+            Session.add(deleted_alias)
             Session.commit()
+            LOG.i(f"Moving {alias} to global trash {deleted_alias}")
 
-    LOG.i("delete alias %s", alias)
     Alias.filter(Alias.id == alias.id).delete()
     Session.commit()
```
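
Call-site note: the docstring above makes `delete_alias` the single entry point for alias deletion. A minimal sketch of a caller, mirroring the dashboard hunk below (the `alias` and `current_user` arguments are stand-ins for real model instances):

```python
from app import alias_utils

def handle_delete(alias, current_user) -> str:
    email = alias.email  # capture before the Alias row is deleted
    alias_utils.delete_alias(alias, current_user)
    return f"Alias {email} has been deleted"
```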

```diff
@@ -434,6 +434,8 @@ HIBP_MAX_ALIAS_CHECK = 10_000
 HIBP_RPM = 100
 HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
 
+KEEP_OLD_DATA_DAYS = 30
+
 POSTMASTER = os.environ.get("POSTMASTER")
 
 # store temporary files, especially for debugging
```

```diff
@@ -14,7 +14,7 @@ from app.models import PartnerUser, SocialAuth
 from app.proton.utils import get_proton_partner
 from app.utils import sanitize_next_url
 
-_SUDO_GAP = 900
+_SUDO_GAP = 120
 
 
 class LoginForm(FlaskForm):
```
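
Note: this shortens the sudo re-authentication window from 900 s (15 min) to 120 s (2 min). A sketch of how such a gap is typically enforced (illustrative only, not the project's actual check):

```python
import arrow

_SUDO_GAP = 120  # seconds

def sudo_is_fresh(last_sudo: arrow.Arrow) -> bool:
    # Illustrative: force re-authentication once the last sudo
    # confirmation is older than _SUDO_GAP seconds.
    return (arrow.now() - last_sudo).total_seconds() < _SUDO_GAP
```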

```diff
@@ -141,7 +141,7 @@ def index():
         )
 
         if request.form.get("form-name") == "delete-alias":
-            LOG.d("delete alias %s", alias)
+            LOG.i(f"User {current_user} requested deletion of alias {alias}")
             email = alias.email
             alias_utils.delete_alias(alias, current_user)
             flash(f"Alias {email} has been deleted", "success")
```

```diff
@@ -21,6 +21,7 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
 LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
 RETURN_PATH = "Return-Path"
 AUTHENTICATION_RESULTS = "Authentication-Results"
+SL_QUEUE_ID = "X-SL-Queue-Id"
 
 # headers used to DKIM sign in order of preference
 DKIM_HEADERS = [
```
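
Note: the new `X-SL-Queue-Id` constant is added to the keep-list in `forward_email_to_mailbox` and covered by `test_preserve_headers` at the end of this compare. Header names are case-insensitive in the stdlib `email` API, which the tests rely on:

```python
from email.message import EmailMessage

SL_QUEUE_ID = "X-SL-Queue-Id"

msg = EmailMessage()
msg[SL_QUEUE_ID] = "abc123"
assert msg["x-sl-queue-id"] == "abc123"  # lookup is case-insensitive
```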

```diff
@@ -494,9 +494,10 @@ def delete_header(msg: Message, header: str):
 
 def sanitize_header(msg: Message, header: str):
     """remove trailing space and remove linebreak from a header"""
+    header_lowercase = header.lower()
     for i in reversed(range(len(msg._headers))):
         header_name = msg._headers[i][0].lower()
-        if header_name == header.lower():
+        if header_name == header_lowercase:
             # msg._headers[i] is a tuple like ('From', 'hey@google.com')
             if msg._headers[i][1]:
                 msg._headers[i] = (
```
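
Note: caching `header.lower()` before the loop is a behavior-preserving micro-optimization; previously the same lowercase string was recomputed for every stored header. The same pattern in isolation:

```python
# Compare one target name against many (name, value) pairs without
# re-lowercasing the target on every iteration.
pairs = [("From", "a@x.test"), ("To", "b@y.test"), ("FROM", "c@z.test")]

target_lower = "From".lower()  # computed once
matches = [value for name, value in pairs if name.lower() == target_lower]
assert matches == ["a@x.test", "c@z.test"]
```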

```diff
@@ -30,7 +30,9 @@ def apply_dmarc_policy_for_forward_phase(
 ) -> Tuple[Message, Optional[str]]:
     spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
     if not DMARC_CHECK_ENABLED or not spam_result:
+        LOG.i("DMARC check disabled")
         return msg, None
+    LOG.i(f"Spam check result in {spam_result}")
 
     from_header = get_header_unicode(msg[headers.FROM])
```

```diff
@@ -150,8 +152,10 @@ def apply_dmarc_policy_for_reply_phase(
 ) -> Optional[str]:
     spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
     if not DMARC_CHECK_ENABLED or not spam_result:
+        LOG.i("DMARC check disabled")
         return None
 
+    LOG.i(f"Spam check result is {spam_result}")
     if spam_result.dmarc not in (
         DmarcCheckResult.quarantine,
         DmarcCheckResult.reject,
```

```diff
@@ -30,7 +30,10 @@ def handle_batch_import(batch_import: BatchImport):
 
     LOG.d("Download file %s from %s", batch_import.file, file_url)
     r = requests.get(file_url)
-    lines = [line.decode("utf-8") for line in r.iter_lines()]
+    # Replace invisible character
+    lines = [
+        line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
+    ]
 
     import_from_csv(batch_import, user, lines)
```
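
Note: the stripped `\ufeff` is the UTF-8 byte-order mark. Exported CSVs often start with one; left in place, it glues itself to the first header cell and breaks column lookups:

```python
import csv

raw = "\ufeffalias,note\r\nhello@example.com,test\r\n"

# Without cleaning, the first column is named "\ufeffalias", not "alias".
dirty = list(csv.DictReader(raw.splitlines()))
assert "alias" not in dirty[0]

cleaned = [line.replace("\ufeff", "").strip() for line in raw.splitlines()]
assert list(csv.DictReader(cleaned))[0]["alias"] == "hello@example.com"
```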

```diff
@@ -30,7 +30,9 @@ def check_bucket_limit(
     try:
         value = lock_redis.incr(bucket_lock_name, bucket_seconds)
         if value > max_hits:
-            LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
+            LOG.i(
+                f"Rate limit hit for {lock_name} (bucket id {bucket_id}) -> {value}/{max_hits}"
+            )
             newrelic.agent.record_custom_event(
                 "BucketRateLimit",
                 {"lock_name": lock_name, "bucket_seconds": bucket_seconds},
```
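
For context, a bucket (fixed-window) limiter like the one above is usually an `INCR` on a key scoped to the current time window, expiring with the window. A generic redis-py sketch, not this project's exact wrapper:

```python
import time

import redis

r = redis.Redis()

def check_bucket_limit(lock_name: str, bucket_seconds: int, max_hits: int) -> bool:
    bucket_id = int(time.time()) // bucket_seconds  # current window
    key = f"bucket_lock:{lock_name}:{bucket_id}"
    value = r.incr(key)
    if value == 1:
        r.expire(key, bucket_seconds)  # counter dies with its window
    return value <= max_hits
```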

```diff
@@ -5,19 +5,9 @@ from typing import Optional
 import boto3
 import requests
 
-from app.config import (
-    AWS_REGION,
-    BUCKET,
-    AWS_ACCESS_KEY_ID,
-    AWS_SECRET_ACCESS_KEY,
-    LOCAL_FILE_UPLOAD,
-    UPLOAD_DIR,
-    URL,
-    AWS_ENDPOINT_URL,
-)
+from app import config
 from app.log import LOG
 
 
 _s3_client = None
```

```diff
@@ -25,12 +15,12 @@ def _get_s3client():
     global _s3_client
     if _s3_client is None:
         args = {
-            "aws_access_key_id": AWS_ACCESS_KEY_ID,
-            "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
-            "region_name": AWS_REGION,
+            "aws_access_key_id": config.AWS_ACCESS_KEY_ID,
+            "aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
+            "region_name": config.AWS_REGION,
         }
-        if AWS_ENDPOINT_URL:
-            args["endpoint_url"] = AWS_ENDPOINT_URL
+        if config.AWS_ENDPOINT_URL:
+            args["endpoint_url"] = config.AWS_ENDPOINT_URL
         _s3_client = boto3.client("s3", **args)
     return _s3_client
```

```diff
@@ -38,8 +28,8 @@ def _get_s3client():
 def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
     bs.seek(0)
 
-    if LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(UPLOAD_DIR, key)
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, key)
         file_dir = os.path.dirname(file_path)
         os.makedirs(file_dir, exist_ok=True)
         with open(file_path, "wb") as f:
```

```diff
@@ -47,7 +37,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
 
     else:
         _get_s3client().put_object(
-            Bucket=BUCKET,
+            Bucket=config.BUCKET,
             Key=key,
             Body=bs,
             ContentType=content_type,
```

```diff
@@ -57,8 +47,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
     bs.seek(0)
 
-    if LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(UPLOAD_DIR, path)
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, path)
         file_dir = os.path.dirname(file_path)
         os.makedirs(file_dir, exist_ok=True)
         with open(file_path, "wb") as f:
```

```diff
@@ -66,7 +56,7 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 
     else:
         _get_s3client().put_object(
-            Bucket=BUCKET,
+            Bucket=config.BUCKET,
             Key=path,
             Body=bs,
             # Support saving a remote file using Http header
```

```diff
@@ -77,12 +67,12 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 
 
 def download_email(path: str) -> Optional[str]:
-    if LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(UPLOAD_DIR, path)
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, path)
         with open(file_path, "rb") as f:
             return f.read()
     resp = _get_s3client().get_object(
-        Bucket=BUCKET,
+        Bucket=config.BUCKET,
         Key=path,
     )
     if not resp or "Body" not in resp:
```

```diff
@@ -96,29 +86,30 @@ def upload_from_url(url: str, upload_path):
 
 
 def get_url(key: str, expires_in=3600) -> str:
-    if LOCAL_FILE_UPLOAD:
-        return URL + "/static/upload/" + key
+    if config.LOCAL_FILE_UPLOAD:
+        return config.URL + "/static/upload/" + key
     else:
         return _get_s3client().generate_presigned_url(
             ExpiresIn=expires_in,
             ClientMethod="get_object",
-            Params={"Bucket": BUCKET, "Key": key},
+            Params={"Bucket": config.BUCKET, "Key": key},
         )
 
 
 def delete(path: str):
-    if LOCAL_FILE_UPLOAD:
-        os.remove(os.path.join(UPLOAD_DIR, path))
+    if config.LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(config.UPLOAD_DIR, path)
+        os.remove(file_path)
     else:
-        _get_s3client().delete_object(Bucket=BUCKET, Key=path)
+        _get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
 
 
 def create_bucket_if_not_exists():
     s3client = _get_s3client()
     buckets = s3client.list_buckets()
     for bucket in buckets["Buckets"]:
-        if bucket["Name"] == BUCKET:
+        if bucket["Name"] == config.BUCKET:
             LOG.i("Bucket already exists")
             return
-    s3client.create_bucket(Bucket=BUCKET)
-    LOG.i(f"Bucket {BUCKET} created")
+    s3client.create_bucket(Bucket=config.BUCKET)
+    LOG.i(f"Bucket {config.BUCKET} created")
```
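
The point of replacing `from app.config import BUCKET, ...` with `from app import config` throughout the s3 module: `from ... import NAME` copies the value once at import time, so later monkeypatching of `app.config` is invisible to this module, while `config.NAME` re-reads the attribute on every call. `test_cleanup_old_imports` below depends on this when it sets `config.UPLOAD_DIR = tmpdir`. The difference in isolation:

```python
import types

# Stand-in for a config module.
config = types.SimpleNamespace(UPLOAD_DIR="/var/upload")

frozen = config.UPLOAD_DIR  # like `from app.config import UPLOAD_DIR`

def current_dir() -> str:
    return config.UPLOAD_DIR  # re-read on each call, sees patches

config.UPLOAD_DIR = "/tmp/test"  # what a test monkeypatch does
assert frozen == "/var/upload"
assert current_dir() == "/tmp/test"
```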
app/cron.py (+14)

```diff
@@ -61,6 +61,9 @@ from app.pgp_utils import load_public_key_and_check, PGPException
 from app.proton.utils import get_proton_partner
 from app.utils import sanitize_email
 from server import create_light_app
+from tasks.cleanup_old_imports import cleanup_old_imports
+from tasks.cleanup_old_jobs import cleanup_old_jobs
+from tasks.cleanup_old_notifications import cleanup_old_notifications
 
 DELETE_GRACE_DAYS = 30
```

```diff
@@ -1221,6 +1224,13 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
         Session.commit()
 
 
+def delete_old_data():
+    oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
+    cleanup_old_imports(oldest_valid)
+    cleanup_old_jobs(oldest_valid)
+    cleanup_old_notifications(oldest_valid)
+
+
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()
```

```diff
@@ -1235,6 +1245,7 @@ if __name__ == "__main__":
             "notify_manual_subscription_end",
             "notify_premium_end",
             "delete_logs",
+            "delete_old_data",
             "poll_apple_subscription",
             "sanity_check",
             "delete_old_monitoring",
```

```diff
@@ -1263,6 +1274,9 @@
     elif args.job == "delete_logs":
         LOG.d("Deleted Logs")
         delete_logs()
+    elif args.job == "delete_old_data":
+        LOG.d("Delete old data")
+        delete_old_data()
     elif args.job == "poll_apple_subscription":
         LOG.d("Poll Apple Subscriptions")
         poll_apple_subscription()
```
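
Wiring the new job takes three code changes in `app/cron.py` (function, argparse choice, dispatch branch) plus the crontab entry below, which runs it daily at 05:30 server time. The dispatch pattern, reduced to its skeleton:

```python
import argparse

def delete_old_data():
    print("cleanup old imports, jobs and notifications")

parser = argparse.ArgumentParser()
parser.add_argument("-j", "--job", choices=["delete_old_data"])
args = parser.parse_args(["-j", "delete_old_data"])
if args.job == "delete_old_data":
    delete_old_data()
```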

```diff
@@ -37,6 +37,12 @@ jobs:
     schedule: "15 5 * * *"
     captureStderr: true
 
+  - name: SimpleLogin Delete Old data
+    command: python /code/cron.py -j delete_old_data
+    shell: /bin/bash
+    schedule: "30 5 * * *"
+    captureStderr: true
+
   - name: SimpleLogin Poll Apple Subscriptions
     command: python /code/cron.py -j poll_apple_subscription
     shell: /bin/bash
```

```diff
@@ -875,6 +875,7 @@ def forward_email_to_mailbox(
         # References and In-Reply-To are used for keeping the email thread
         headers.REFERENCES,
         headers.IN_REPLY_TO,
+        headers.SL_QUEUE_ID,
         headers.LIST_UNSUBSCRIBE,
         headers.LIST_UNSUBSCRIBE_POST,
     ] + headers.MIME_HEADERS
```

```diff
@@ -2040,10 +2041,11 @@ def handle(envelope: Envelope, msg: Message) -> str:
         return status.E204
 
     # sanitize email headers
-    sanitize_header(msg, "from")
-    sanitize_header(msg, "to")
-    sanitize_header(msg, "cc")
-    sanitize_header(msg, "reply-to")
+    sanitize_header(msg, headers.FROM)
+    sanitize_header(msg, headers.TO)
+    sanitize_header(msg, headers.CC)
+    sanitize_header(msg, headers.REPLY_TO)
+    sanitize_header(msg, headers.MESSAGE_ID)
 
     LOG.d(
         "==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "
```
app/tasks/__init__.py (new, empty)

app/tasks/cleanup_old_imports.py (new, +19)

```diff
@@ -0,0 +1,19 @@
+import arrow
+
+from app import s3
+from app.log import LOG
+from app.models import BatchImport
+
+
+def cleanup_old_imports(oldest_allowed: arrow.Arrow):
+    LOG.i(f"Deleting imports older than {oldest_allowed}")
+    for batch_import in (
+        BatchImport.filter(BatchImport.created_at < oldest_allowed).yield_per(500).all()
+    ):
+        LOG.i(
+            f"Deleting batch import {batch_import} with file {batch_import.file.path}"
+        )
+        file = batch_import.file
+        if file is not None:
+            s3.delete(file.path)
+        BatchImport.delete(batch_import.id, commit=True)
```
app/tasks/cleanup_old_jobs.py (new, +24)

```diff
@@ -0,0 +1,24 @@
+import arrow
+from sqlalchemy import or_, and_
+
+from app import config
+from app.db import Session
+from app.log import LOG
+from app.models import Job, JobState
+
+
+def cleanup_old_jobs(oldest_allowed: arrow.Arrow):
+    LOG.i(f"Deleting jobs older than {oldest_allowed}")
+    count = Job.filter(
+        or_(
+            Job.state == JobState.done.value,
+            Job.state == JobState.error.value,
+            and_(
+                Job.state == JobState.taken.value,
+                Job.attempts >= config.JOB_MAX_ATTEMPTS,
+            ),
+        ),
+        Job.updated_at < oldest_allowed,
+    ).delete()
+    Session.commit()
+    LOG.i(f"Deleted {count} jobs")
```
app/tasks/cleanup_old_notifications.py (new, +12)

```diff
@@ -0,0 +1,12 @@
+import arrow
+
+from app.db import Session
+from app.log import LOG
+from app.models import Notification
+
+
+def cleanup_old_notifications(oldest_allowed: arrow.Arrow):
+    LOG.i(f"Deleting notifications older than {oldest_allowed}")
+    count = Notification.filter(Notification.created_at < oldest_allowed).delete()
+    Session.commit()
+    LOG.i(f"Deleted {count} notifications")
```
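
In plain terms, `cleanup_old_jobs` deletes a job when it is older than the cutoff and in a terminal-ish state: done, errored, or taken with its attempts exhausted. A pure-Python restatement of the predicate (state names are illustrative; the real code compares `JobState` enum values and reads `JOB_MAX_ATTEMPTS` from `app.config`):

```python
JOB_MAX_ATTEMPTS = 5  # illustrative stand-in

def should_delete(state: str, attempts: int, updated_before_cutoff: bool) -> bool:
    terminal = state in ("done", "error") or (
        state == "taken" and attempts >= JOB_MAX_ATTEMPTS
    )
    return terminal and updated_before_cutoff
```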

```diff
@@ -28,7 +28,7 @@ def test_get_alias_for_free_user_has_no_alias():
     assert len(aliases) == 0
 
 
-def test_get_alias_for_lifetime():
+def test_get_alias_for_lifetime_with_null_hibp_date():
     user = create_new_user()
     user.lifetime = True
     alias_id = Alias.create_new_random(user).id
```

```diff
@@ -39,6 +39,19 @@ def test_get_alias_for_lifetime():
     assert alias_id == aliases[0].id
 
 
+def test_get_alias_for_lifetime_with_old_hibp_date():
+    user = create_new_user()
+    user.lifetime = True
+    alias = Alias.create_new_random(user)
+    alias.hibp_last_check = arrow.now().shift(days=-1)
+    alias_id = alias.id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
+    )
+    assert alias_id == aliases[0].id
+
+
 def create_partner_sub(user: User):
     pu = PartnerUser.create(
         partner_id=get_proton_partner().id,
```

```diff
@@ -114,3 +127,16 @@ def test_skipped_user_is_not_checked():
         cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
     )
     assert len(aliases) == 0
+
+
+def test_already_checked_is_not_checked():
+    user = create_new_user()
+    user.lifetime = True
+    alias = Alias.create_new_random(user)
+    alias.hibp_last_check = arrow.now().shift(days=1)
+    alias_id = alias.id
+    Session.commit()
+    aliases = list(
+        cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
+    )
+    assert len(aliases) == 0
```
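
The new tests call `get_alias_to_check_hibp(now, skipped_user_ids, alias_id, alias_id + 1)`; the `+ 1` reads like a half-open ID range `[min_id, max_id)`, an assumption here but consistent with needing it to cover a single alias:

```python
def in_range(alias_id: int, min_id: int, max_id: int) -> bool:
    return min_id <= alias_id < max_id  # half-open, like Python's range()

assert in_range(42, 42, 43)       # the single-alias pattern from the tests
assert not in_range(43, 42, 43)
```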
app/tests/tasks/__init__.py (new, empty)

app/tests/tasks/test_cleanup_old_imports.py (new, +35)

```diff
@@ -0,0 +1,35 @@
+import tempfile
+from io import BytesIO
+
+import arrow
+
+from app import s3, config
+from app.models import File, BatchImport
+from tasks.cleanup_old_imports import cleanup_old_imports
+from tests.utils import random_token, create_new_user
+
+
+def test_cleanup_old_imports():
+    BatchImport.filter().delete()
+    with tempfile.TemporaryDirectory() as tmpdir:
+        config.UPLOAD_DIR = tmpdir
+        user = create_new_user()
+        path = random_token()
+        s3.upload_from_bytesio(path, BytesIO("data".encode("utf-8")))
+        file = File.create(path=path, commit=True)  # noqa: F821
+        now = arrow.now()
+        delete_batch_import_id = BatchImport.create(
+            user_id=user.id,
+            file_id=file.id,
+            created_at=now.shift(minutes=-1),
+            flush=True,
+        ).id
+        keep_batch_import_id = BatchImport.create(
+            user_id=user.id,
+            file_id=file.id,
+            created_at=now.shift(minutes=+1),
+            commit=True,
+        ).id
+        cleanup_old_imports(now)
+        assert BatchImport.get(id=delete_batch_import_id) is None
+        assert BatchImport.get(id=keep_batch_import_id) is not None
```
app/tests/tasks/test_cleanup_old_jobs.py (new, +72)

```diff
@@ -0,0 +1,72 @@
+import arrow
+
+from app import config
+from app.models import Job, JobState
+from tasks.cleanup_old_jobs import cleanup_old_jobs
+
+
+def test_cleanup_old_jobs():
+    Job.filter().delete()
+    now = arrow.now()
+    delete_ids = [
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.done.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.error.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.taken.value,
+            attempts=config.JOB_MAX_ATTEMPTS,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+    ]
+
+    keep_ids = [
+        Job.create(
+            updated_at=now.shift(minutes=+1),
+            state=JobState.done.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=+1),
+            state=JobState.error.value,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=+1),
+            state=JobState.taken.value,
+            attempts=config.JOB_MAX_ATTEMPTS,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+        Job.create(
+            updated_at=now.shift(minutes=-1),
+            state=JobState.taken.value,
+            attempts=config.JOB_MAX_ATTEMPTS - 1,
+            name="",
+            payload="",
+            flush=True,
+        ).id,
+    ]
+    cleanup_old_jobs(now)
+    for delete_id in delete_ids:
+        assert Job.get(id=delete_id) is None
+    for keep_id in keep_ids:
+        assert Job.get(id=keep_id) is not None
```
app/tests/tasks/test_cleanup_old_notifications.py (new, +26)

```diff
@@ -0,0 +1,26 @@
+import arrow
+
+from app.models import Notification
+from tasks.cleanup_old_notifications import cleanup_old_notifications
+from tests.utils import create_new_user
+
+
+def test_cleanup_old_notifications():
+    Notification.filter().delete()
+    user = create_new_user()
+    now = arrow.now()
+    delete_id = Notification.create(
+        user_id=user.id,
+        created_at=now.shift(minutes=-1),
+        message="",
+        flush=True,
+    ).id
+    keep_id = Notification.create(
+        user_id=user.id,
+        created_at=now.shift(minutes=+1),
+        message="",
+        flush=True,
+    ).id
+    cleanup_old_notifications(now)
+    assert Notification.get(id=delete_id) is None
+    assert Notification.get(id=keep_id) is not None
```

```diff
@@ -384,3 +384,30 @@ def test_break_loop_alias_as_mailbox(flask_client):
     msg[headers.SUBJECT] = random_string()
     result = email_handler.handle(envelope, msg)
     assert result == status.E525
+
+
+@mail_sender.store_emails_test_decorator
+def test_preserve_headers(flask_client):
+    headers_to_keep = [
+        headers.SUBJECT,
+        headers.DATE,
+        headers.MESSAGE_ID,
+        headers.REFERENCES,
+        headers.IN_REPLY_TO,
+        headers.SL_QUEUE_ID,
+    ] + headers.MIME_HEADERS
+    user = create_new_user()
+    alias = Alias.create_new_random(user)
+    envelope = Envelope()
+    envelope.mail_from = "somewhere@lo.cal"
+    envelope.rcpt_tos = [alias.email]
+    msg = EmailMessage()
+    for header in headers_to_keep:
+        msg[header] = header + "keep"
+    result = email_handler.handle(envelope, msg)
+    assert result == status.E200
+    sent_mails = mail_sender.get_stored_emails()
+    assert len(sent_mails) == 1
+    msg = sent_mails[0].msg
+    for header in headers_to_keep:
+        assert msg[header] == header + "keep"
```