Compare commits


63 Commits

Author SHA1 Message Date
6ce4344005 4.41.0
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m35s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 4m50s
Build-Release-Image / Merge-Images (push) Successful in 19s
Build-Release-Image / Create-Release (push) Successful in 12s
Build-Release-Image / Notify (push) Successful in 21s
2024-03-14 12:00:08 +00:00
2f50662c5d 4.40.2
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m24s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 4m8s
Build-Release-Image / Merge-Images (push) Successful in 51s
Build-Release-Image / Create-Release (push) Successful in 22s
Build-Release-Image / Notify (push) Successful in 18s
2024-03-07 12:00:08 +00:00
810b59efec 4.40.1
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m26s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m48s
Build-Release-Image / Merge-Images (push) Successful in 20s
Build-Release-Image / Create-Release (push) Successful in 33s
Build-Release-Image / Notify (push) Successful in 20s
2024-03-05 12:00:09 +00:00
bfbcf567aa Merge pull request 'Replace Drone with Gitea Actions' (#1) from gitea-actions into main
Reviewed-on: #1
2024-03-04 13:42:58 +00:00
a505186051 Remove Drone 2024-03-04 13:38:57 +00:00
8fcca8571a Add Gitea Actions 2024-03-04 13:38:52 +00:00
1550507667 4.39.3
All checks were successful
continuous-integration/drone/tag Build is passing
2024-02-27 12:00:07 +00:00
757f153042 4.39.2
All checks were successful
continuous-integration/drone/tag Build is passing
2024-02-23 12:00:07 +00:00
a9f65bed60 4.39.1
All checks were successful
continuous-integration/drone/tag Build is passing
2024-02-20 12:00:07 +00:00
a8ca607581 4.38.3
All checks were successful
continuous-integration/drone/tag Build is passing
2024-02-14 12:00:07 +00:00
5b47bd1654 4.38.2
All checks were successful
continuous-integration/drone/tag Build is passing
2024-02-06 12:00:07 +00:00
e9faf93878 4.38.0
All checks were successful
continuous-integration/drone/tag Build is passing
2024-02-03 16:55:23 +00:00
0f60f7cec9 4.37.2
All checks were successful
continuous-integration/drone/tag Build is passing
2024-01-27 12:00:07 +00:00
3180034ff8 4.37.1
All checks were successful
continuous-integration/drone/tag Build is passing
2024-01-25 12:00:08 +00:00
b3ee67213d 4.37.0
All checks were successful
continuous-integration/drone/tag Build is passing
2024-01-18 12:00:07 +00:00
aeb34f8582 4.36.8
All checks were successful
continuous-integration/drone/tag Build is passing
2023-12-28 12:00:07 +00:00
2372b8f50f 4.36.7
All checks were successful
continuous-integration/drone/tag Build is passing
2023-12-21 12:00:09 +00:00
f3050b2ca0 4.36.6
All checks were successful
continuous-integration/drone/tag Build is passing
2023-12-17 14:56:57 +00:00
ebe941c8a5 4.36.5
All checks were successful
continuous-integration/drone/tag Build is passing
2023-11-30 12:00:09 +00:00
651b2dd52a 4.36.4 2023-11-22 12:00:09 +00:00
1c580cb6f7 4.36.3 2023-11-08 12:00:06 +00:00
21765ae9d8 4.35.6 2023-11-07 12:00:06 +00:00
d661a52f43 4.35.3
All checks were successful
continuous-integration/drone/tag Build is passing
2023-10-05 12:00:06 +01:00
45528ff81d 4.35.2
All checks were successful
continuous-integration/drone/tag Build is passing
2023-10-03 12:00:06 +01:00
6170fbf127 4.35.1
All checks were successful
continuous-integration/drone/tag Build is passing
2023-10-02 12:00:06 +01:00
c8ab76066b 4.35.0
All checks were successful
continuous-integration/drone/tag Build is passing
2023-09-29 12:00:06 +01:00
357d34a42b 4.34.4
All checks were successful
continuous-integration/drone/tag Build is passing
2023-08-31 12:00:06 +01:00
246754872d 4.34.3
All checks were successful
continuous-integration/drone/tag Build is passing
2023-08-29 20:20:00 +01:00
df59d73d66 4.34.2
All checks were successful
continuous-integration/drone/tag Build is passing
2023-08-25 12:00:05 +01:00
ff6d78f255 4.34.1
All checks were successful
continuous-integration/drone/tag Build is passing
2023-08-09 12:00:05 +01:00
d59fa5fe1c Update .drone.yml 2023-08-06 17:56:31 +00:00
de1fe02200 4.33.3
All checks were successful
continuous-integration/drone/tag Build is passing
2023-08-06 17:51:04 +01:00
439bfc5efd Update README.md 2023-08-06 16:04:57 +00:00
0a8a420850 Update README.md 2023-08-06 16:04:47 +00:00
d119e74c2f Update README.md 2023-08-06 16:04:41 +00:00
b5485429ef Remove provenance [CI SKIP] 2023-08-06 16:01:04 +00:00
f3a7900cbd 4.32.4
All checks were successful
continuous-integration/drone/tag Build is passing
2023-08-02 16:49:54 +01:00
0f91161ff3 4.32.1
All checks were successful
continuous-integration/drone/tag Build is passing
2023-07-12 11:00:04 +00:00
1da9a3f828 4.32.0
All checks were successful
continuous-integration/drone/tag Build is passing
2023-07-11 11:00:05 +00:00
167e56bc95 4.31.0
Some checks failed
continuous-integration/drone/tag Build is failing
2023-06-30 11:00:06 +00:00
c5a0d2d513 4.30.1
Some checks failed
continuous-integration/drone/tag Build is failing
2023-06-28 11:00:03 +00:00
25ebbaa7fd 4.30.0
Some checks failed
continuous-integration/drone/tag Build is failing
2023-06-27 11:00:04 +00:00
067d94841e 4.29.4
All checks were successful
continuous-integration/drone/tag Build is passing
2023-06-07 11:00:05 +00:00
804eec0c03 4.29.3
All checks were successful
continuous-integration/drone/tag Build is passing
2023-06-01 11:00:05 +00:00
651f3f1e9c 4.28.2
All checks were successful
continuous-integration/drone/tag Build is passing
2023-05-16 11:00:09 +00:00
fd988d6ef0 4.28.1
All checks were successful
continuous-integration/drone/tag Build is passing
2023-05-10 11:00:05 +00:00
da4a8cc979 4.27.0
All checks were successful
continuous-integration/drone/tag Build is passing
2023-04-25 11:00:05 +00:00
299da46abe 4.26.1
All checks were successful
continuous-integration/drone/tag Build is passing
2023-04-20 11:00:06 +00:00
1ad8294ec3 4.25.1
All checks were successful
continuous-integration/drone/tag Build is passing
2023-04-15 11:00:05 +00:00
f5de4a9624 4.24.0
All checks were successful
continuous-integration/drone/tag Build is passing
2023-04-11 11:00:05 +00:00
5501b033e8 4.23.0
All checks were successful
continuous-integration/drone/tag Build is passing
2023-03-24 12:00:07 +00:00
32a4b865ef 4.22.5
All checks were successful
continuous-integration/drone/tag Build is passing
2023-03-14 12:00:06 +00:00
5d5a23dd63 Update '.drone.yml' 2023-03-08 18:32:53 +00:00
03053d0e54 4.22.4
Some checks are pending
continuous-integration/drone/tag Build is running
2023-03-08 12:00:06 +00:00
4d70590d05 4.22.3
All checks were successful
continuous-integration/drone/tag Build is passing
2023-03-01 12:00:06 +00:00
bc879c10ae 4.22.2
All checks were successful
continuous-integration/drone/tag Build is passing
2023-02-16 12:00:05 +00:00
c6b237a004 Update 'README.md' 2023-02-10 13:00:46 +00:00
722979fe19 Update 'README.md' 2023-01-27 16:29:12 +00:00
b63ada023d Update '.drone.yml' 2023-01-27 16:26:22 +00:00
8b4e4e3a2b 4.22.0
All checks were successful
continuous-integration/drone/tag Build is passing
2023-01-17 12:00:04 +00:00
32465d1220 4.21.3
All checks were successful
continuous-integration/drone/tag Build is passing
2022-12-30 16:47:07 +00:00
98bae4c86c 4.21.3 2022-12-30 16:23:27 +00:00
7ff6cf2451 add drone 2022-12-30 15:35:10 +00:00
27 changed files with 108 additions and 586 deletions

View File

@@ -5,7 +5,7 @@ on:
       - '*'
 env:
-  CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login
+  CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login-dev
   TEA_VERSION: 0.9.2
 jobs:

View File

@@ -1,6 +1,7 @@
 name: Test and lint
-on: [push, pull_request]
+on:
+  push:
 jobs:
   lint:
@@ -138,12 +139,6 @@ jobs:
         with:
           fetch-depth: 0
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
       - name: Create Sentry release
         uses: getsentry/action-release@v1
         env:
@@ -163,7 +158,6 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: .
-          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.meta.outputs.tags }}

View File

@@ -151,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
 ## Pull request
-The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
+The code is formatted using https://github.com/psf/black, to format the code, simply run
 ```
-poetry run ruff format .
+poetry run black .
 ```
 The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

View File

@@ -434,8 +434,6 @@ HIBP_MAX_ALIAS_CHECK = 10_000
 HIBP_RPM = 100
 HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
-KEEP_OLD_DATA_DAYS = 30
 POSTMASTER = os.environ.get("POSTMASTER")
 # store temporary files, especially for debugging

View File

@@ -2,12 +2,10 @@ from app.dashboard.base import dashboard_bp
 from flask_login import login_required, current_user
 from app.alias_utils import alias_export_csv
 from app.dashboard.views.enter_sudo import sudo_required
-from app.extensions import limiter
 @dashboard_bp.route("/alias_export", methods=["GET"])
 @login_required
 @sudo_required
-@limiter.limit("2/minute")
 def alias_export_route():
     return alias_export_csv(current_user)

View File

@@ -7,7 +7,6 @@ from app.config import JOB_BATCH_IMPORT
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
-from app.extensions import limiter
 from app.log import LOG
 from app.models import File, BatchImport, Job
 from app.utils import random_string, CSRFValidationForm
@@ -16,7 +15,6 @@ from app.utils import random_string, CSRFValidationForm
 @dashboard_bp.route("/batch_import", methods=["GET", "POST"])
 @login_required
 @sudo_required
-@limiter.limit("10/minute", methods=["POST"])
 def batch_import_route():
     # only for users who have custom domains
     if not current_user.verified_custom_domains():
@@ -41,7 +39,7 @@ def batch_import_route():
         return redirect(request.url)
     if len(batch_imports) > 10:
         flash(
-            "You have too many imports already. Please wait until some get cleaned up",
+            "You have too many imports already. Wait until some get cleaned up",
             "error",
         )
         return render_template(

View File

@@ -494,10 +494,9 @@ def delete_header(msg: Message, header: str):
 def sanitize_header(msg: Message, header: str):
     """remove trailing space and remove linebreak from a header"""
-    header_lowercase = header.lower()
     for i in reversed(range(len(msg._headers))):
         header_name = msg._headers[i][0].lower()
-        if header_name == header_lowercase:
+        if header_name == header.lower():
             # msg._headers[i] is a tuple like ('From', 'hey@google.com')
             if msg._headers[i][1]:
                 msg._headers[i] = (

View File

@@ -5,9 +5,19 @@ from typing import Optional
 import boto3
 import requests
-from app import config
+from app.config import (
+    AWS_REGION,
+    BUCKET,
+    AWS_ACCESS_KEY_ID,
+    AWS_SECRET_ACCESS_KEY,
+    LOCAL_FILE_UPLOAD,
+    UPLOAD_DIR,
+    URL,
+    AWS_ENDPOINT_URL,
+)
 from app.log import LOG
 _s3_client = None
@@ -15,12 +25,12 @@ def _get_s3client():
     global _s3_client
     if _s3_client is None:
         args = {
-            "aws_access_key_id": config.AWS_ACCESS_KEY_ID,
-            "aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
-            "region_name": config.AWS_REGION,
+            "aws_access_key_id": AWS_ACCESS_KEY_ID,
+            "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
+            "region_name": AWS_REGION,
         }
-        if config.AWS_ENDPOINT_URL:
-            args["endpoint_url"] = config.AWS_ENDPOINT_URL
+        if AWS_ENDPOINT_URL:
+            args["endpoint_url"] = AWS_ENDPOINT_URL
         _s3_client = boto3.client("s3", **args)
     return _s3_client
@@ -28,8 +38,8 @@ def _get_s3client():
 def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
     bs.seek(0)
-    if config.LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(config.UPLOAD_DIR, key)
+    if LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(UPLOAD_DIR, key)
         file_dir = os.path.dirname(file_path)
         os.makedirs(file_dir, exist_ok=True)
         with open(file_path, "wb") as f:
@@ -37,7 +47,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
     else:
         _get_s3client().put_object(
-            Bucket=config.BUCKET,
+            Bucket=BUCKET,
             Key=key,
             Body=bs,
             ContentType=content_type,
@@ -47,8 +57,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
     bs.seek(0)
-    if config.LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(config.UPLOAD_DIR, path)
+    if LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(UPLOAD_DIR, path)
         file_dir = os.path.dirname(file_path)
         os.makedirs(file_dir, exist_ok=True)
         with open(file_path, "wb") as f:
@@ -56,7 +66,7 @@ upload_email_from_bytesio(path: str, bs: BytesIO, filename):
     else:
         _get_s3client().put_object(
-            Bucket=config.BUCKET,
+            Bucket=BUCKET,
             Key=path,
             Body=bs,
             # Support saving a remote file using Http header
@@ -67,12 +77,12 @@ upload_email_from_bytesio(path: str, bs: BytesIO, filename):
 def download_email(path: str) -> Optional[str]:
-    if config.LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(config.UPLOAD_DIR, path)
+    if LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(UPLOAD_DIR, path)
         with open(file_path, "rb") as f:
             return f.read()
     resp = _get_s3client().get_object(
-        Bucket=config.BUCKET,
+        Bucket=BUCKET,
         Key=path,
     )
     if not resp or "Body" not in resp:
@@ -86,30 +96,29 @@ def upload_from_url(url: str, upload_path):
 def get_url(key: str, expires_in=3600) -> str:
-    if config.LOCAL_FILE_UPLOAD:
-        return config.URL + "/static/upload/" + key
+    if LOCAL_FILE_UPLOAD:
+        return URL + "/static/upload/" + key
     else:
         return _get_s3client().generate_presigned_url(
             ExpiresIn=expires_in,
             ClientMethod="get_object",
-            Params={"Bucket": config.BUCKET, "Key": key},
+            Params={"Bucket": BUCKET, "Key": key},
         )
 def delete(path: str):
-    if config.LOCAL_FILE_UPLOAD:
-        file_path = os.path.join(config.UPLOAD_DIR, path)
-        os.remove(file_path)
+    if LOCAL_FILE_UPLOAD:
+        os.remove(os.path.join(UPLOAD_DIR, path))
     else:
-        _get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
+        _get_s3client().delete_object(Bucket=BUCKET, Key=path)
 def create_bucket_if_not_exists():
     s3client = _get_s3client()
     buckets = s3client.list_buckets()
     for bucket in buckets["Buckets"]:
-        if bucket["Name"] == config.BUCKET:
+        if bucket["Name"] == BUCKET:
             LOG.i("Bucket already exists")
             return
-    s3client.create_bucket(Bucket=config.BUCKET)
-    LOG.i(f"Bucket {config.BUCKET} created")
+    s3client.create_bucket(Bucket=BUCKET)
+    LOG.i(f"Bucket {BUCKET} created")

View File

@@ -5,7 +5,7 @@ from typing import List, Tuple
 import arrow
 import requests
-from sqlalchemy import func, desc, or_, and_
+from sqlalchemy import func, desc, or_, and_, nullsfirst
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
@@ -61,9 +61,6 @@ from app.pgp_utils import load_public_key_and_check, PGPException
 from app.proton.utils import get_proton_partner
 from app.utils import sanitize_email
 from server import create_light_app
-from tasks.cleanup_old_imports import cleanup_old_imports
-from tasks.cleanup_old_jobs import cleanup_old_jobs
-from tasks.cleanup_old_notifications import cleanup_old_notifications
 DELETE_GRACE_DAYS = 30
@@ -979,9 +976,6 @@ async def _hibp_check(api_key, queue):
             continue
         user = alias.user
         if user.disabled or not user.is_paid():
-            # Mark it as hibp done to skip it as if it had been checked
-            alias.hibp_last_check = arrow.utcnow()
-            Session.commit()
             continue
         LOG.d("Checking HIBP for %s", alias)
@@ -1036,60 +1030,6 @@ async def _hibp_check(api_key, queue):
         await asyncio.sleep(rate_sleep)
-def get_alias_to_check_hibp(
-    oldest_hibp_allowed: arrow.Arrow,
-    user_ids_to_skip: list[int],
-    min_alias_id: int,
-    max_alias_id: int,
-):
-    now = arrow.now()
-    alias_query = (
-        Session.query(Alias)
-        .join(User, User.id == Alias.user_id)
-        .join(Subscription, User.id == Subscription.user_id, isouter=True)
-        .join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
-        .join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
-        .join(
-            CoinbaseSubscription,
-            User.id == CoinbaseSubscription.user_id,
-            isouter=True,
-        )
-        .join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
-        .join(
-            PartnerSubscription,
-            PartnerSubscription.partner_user_id == PartnerUser.id,
-            isouter=True,
-        )
-        .filter(
-            or_(
-                Alias.hibp_last_check.is_(None),
-                Alias.hibp_last_check < oldest_hibp_allowed,
-            ),
-            Alias.user_id.notin_(user_ids_to_skip),
-            Alias.enabled,
-            Alias.id >= min_alias_id,
-            Alias.id < max_alias_id,
-            User.disabled == False,  # noqa: E712
-            or_(
-                User.lifetime,
-                ManualSubscription.end_at > now,
-                Subscription.next_bill_date > now.date(),
-                AppleSubscription.expires_date > now,
-                CoinbaseSubscription.end_at > now,
-                PartnerSubscription.end_at > now,
-            ),
-        )
-    )
-    if config.HIBP_SKIP_PARTNER_ALIAS:
-        alias_query = alias_query.filter(
-            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
-        )
-    for alias in (
-        alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
-    ):
-        yield alias
 async def check_hibp():
     """
     Check all aliases on the HIBP (Have I Been Pwned) API
@@ -1116,43 +1056,43 @@ async def check_hibp():
     user_ids = [row[0] for row in rows]
     LOG.d("Got %d users to skip" % len(user_ids))
-    LOG.d("Checking aliases")
+    LOG.d("Preparing list of aliases to check")
     queue = asyncio.Queue()
-    min_alias_id = 0
-    max_alias_id = Session.query(func.max(Alias.id)).scalar()
-    step = 10000
-    now = arrow.now()
-    oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
-    alias_checked = 0
-    for alias_batch_id in range(min_alias_id, max_alias_id, step):
-        for alias in get_alias_to_check_hibp(
-            oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
-        ):
-            await queue.put(alias.id)
-        alias_checked += queue.qsize()
-        LOG.d(
-            f"Need to check about {queue.qsize()} aliases in this loop {alias_batch_id}/{max_alias_id}"
-        )
+    max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
+    alias_query = Alias.filter(
+        or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date),
+        Alias.user_id.notin_(user_ids),
+        Alias.enabled,
+    )
+    if config.HIBP_SKIP_PARTNER_ALIAS:
+        alias_query = alias_query(Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0)
+    for alias in (
+        alias_query.order_by(nullsfirst(Alias.hibp_last_check.asc()), Alias.id.asc())
+        .yield_per(500)
+        .enable_eagerloads(False)
+    ):
+        await queue.put(alias.id)
+    LOG.d("Need to check about %s aliases", queue.qsize())
     # Start one checking process per API key
     # Each checking process will take one alias from the queue, get the info
     # and then sleep for 1.5 seconds (due to HIBP API request limits)
     checkers = []
     for i in range(len(config.HIBP_API_KEYS)):
         checker = asyncio.create_task(
             _hibp_check(
                 config.HIBP_API_KEYS[i],
                 queue,
             )
         )
         checkers.append(checker)
     # Wait until all checking processes are done
     for checker in checkers:
         await checker
-    LOG.d(f"Done checking {alias_checked} HIBP API for aliases in breaches")
+    LOG.d("Done checking HIBP API for aliases in breaches")
 def notify_hibp():
@@ -1224,13 +1164,6 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
         Session.commit()
-def delete_old_data():
-    oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
-    cleanup_old_imports(oldest_valid)
-    cleanup_old_jobs(oldest_valid)
-    cleanup_old_notifications(oldest_valid)
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()
@@ -1245,7 +1178,6 @@
             "notify_manual_subscription_end",
             "notify_premium_end",
             "delete_logs",
-            "delete_old_data",
             "poll_apple_subscription",
             "sanity_check",
             "delete_old_monitoring",
@@ -1274,9 +1206,6 @@
     elif args.job == "delete_logs":
         LOG.d("Deleted Logs")
         delete_logs()
-    elif args.job == "delete_old_data":
-        LOG.d("Delete old data")
-        delete_old_data()
     elif args.job == "poll_apple_subscription":
         LOG.d("Poll Apple Subscriptions")
         poll_apple_subscription()

View File

@@ -37,12 +37,6 @@ jobs:
     schedule: "15 5 * * *"
     captureStderr: true
-  - name: SimpleLogin Delete Old data
-    command: python /code/cron.py -j delete_old_data
-    shell: /bin/bash
-    schedule: "30 5 * * *"
-    captureStderr: true
   - name: SimpleLogin Poll Apple Subscriptions
     command: python /code/cron.py -j poll_apple_subscription
     shell: /bin/bash

View File

@@ -2040,11 +2040,10 @@ def handle(envelope: Envelope, msg: Message) -> str:
         return status.E204
     # sanitize email headers
-    sanitize_header(msg, headers.FROM)
-    sanitize_header(msg, headers.TO)
-    sanitize_header(msg, headers.CC)
-    sanitize_header(msg, headers.REPLY_TO)
-    sanitize_header(msg, headers.MESSAGE_ID)
+    sanitize_header(msg, "from")
+    sanitize_header(msg, "to")
+    sanitize_header(msg, "cc")
+    sanitize_header(msg, "reply-to")
     LOG.d(
         "==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env python3
import argparse
import random
import time
from sqlalchemy import func
from app import config
from app.models import Alias, Contact
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
max_alias_id: int = Session.query(func.max(Alias.id)).scalar()
start = time.time()
tests = 1000
for i in range(tests):
alias = (
Alias.filter(Alias.id > int(random.random() * max_alias_id))
.order_by(Alias.id.asc())
.limit(1)
.first()
)
contact = Contact.filter_by(alias_id=alias.id).order_by(Contact.id.asc()).first()
mailboxes = alias.mailboxes
user = alias.user
if i % 10:
print("{i} -> {alias.id}")
end = time.time()
time_taken = end - start
print(f"Took {time_taken} -> {time_taken/tests} per test")

View File

@@ -1,56 +1,29 @@
 #!/usr/bin/env python3
 import argparse
-import time
-from sqlalchemy import func
+from app.log import LOG
 from app.models import Alias, SLDomain
 from app.db import Session
 parser = argparse.ArgumentParser(
     prog="Mark partner created aliases with the PARTNER_CREATED flag",
 )
-parser.add_argument(
-    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
-)
-parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
 args = parser.parse_args()
-alias_id_start = args.start_alias_id
-max_alias_id = args.end_alias_id
-if max_alias_id == 0:
-    max_alias_id = Session.query(func.max(Alias.id)).scalar()
-print(f"Updating aliases from {alias_id_start} to {max_alias_id}")
 domains = SLDomain.filter(SLDomain.partner_id.isnot(None)).all()
-cond = [f"email like '%{domain.domain}'" for domain in domains]
-sql_or_cond = " OR ".join(cond)
-sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})"
-print(sql)
-step = 1000
-updated = 0
-start_time = time.time()
-for batch_start in range(alias_id_start, max_alias_id, step):
-    updated += Session.execute(
-        sql,
-        {
-            "start": batch_start,
-            "end": batch_start + step,
-            "flag": Alias.FLAG_PARTNER_CREATED,
-        },
-    ).rowcount
-    elapsed = time.time() - start_time
-    time_per_alias = elapsed / (batch_start - alias_id_start + step)
-    last_batch_id = batch_start + step
-    remaining = max_alias_id - last_batch_id
-    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
-    hours_remaining = time_remaining / 3600.0
-    percent = int(
-        ((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start)
-    )
-    print(
-        f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining"
-    )
-print(f"Updated aliases up to {max_alias_id}")
+for domain in domains:
+    LOG.i(f"Checking aliases for domain {domain.domain}")
+    for alias in (
+        Alias.filter(
+            Alias.email.like(f"%{domain.domain}"),
+            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0,
+        )
+        .enable_eagerloads(False)
+        .yield_per(100)
+        .all()
+    ):
+        alias.flags = alias.flags | Alias.FLAG_PARTNER_CREATED
+        LOG.i(f" * Updating {alias.email} to {alias.flags}")
+        Session.commit()

View File

View File

@@ -1,19 +0,0 @@
import arrow
from app import s3
from app.log import LOG
from app.models import BatchImport
def cleanup_old_imports(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting imports older than {oldest_allowed}")
for batch_import in (
BatchImport.filter(BatchImport.created_at < oldest_allowed).yield_per(500).all()
):
LOG.i(
f"Deleting batch import {batch_import} with file {batch_import.file.path}"
)
file = batch_import.file
if file is not None:
s3.delete(file.path)
BatchImport.delete(batch_import.id, commit=True)

View File

@@ -1,24 +0,0 @@
import arrow
from sqlalchemy import or_, and_
from app import config
from app.db import Session
from app.log import LOG
from app.models import Job, JobState
def cleanup_old_jobs(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting jobs older than {oldest_allowed}")
count = Job.filter(
or_(
Job.state == JobState.done.value,
Job.state == JobState.error.value,
and_(
Job.state == JobState.taken.value,
Job.attempts >= config.JOB_MAX_ATTEMPTS,
),
),
Job.updated_at < oldest_allowed,
).delete()
Session.commit()
LOG.i(f"Deleted {count} jobs")

View File

@@ -1,12 +0,0 @@
import arrow
from app.db import Session
from app.log import LOG
from app.models import Notification
def cleanup_old_notifications(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting notifications older than {oldest_allowed}")
count = Notification.filter(Notification.created_at < oldest_allowed).delete()
Session.commit()
LOG.i(f"Deleted {count} notifications")

View File

@@ -120,6 +120,21 @@
           </div>
         </div>
         <!-- END WebAuthn -->
+        <!-- Alias import/export -->
+        <div class="card">
+          <div class="card-body">
+            <div class="card-title">Alias import/export</div>
+            <div class="mb-3">
+              You can import your aliases created on other platforms into SimpleLogin.
+              You can also export your aliases to a readable csv format for a future batch import.
+            </div>
+            <a href="{{ url_for('dashboard.batch_import_route') }}"
+               class="btn btn-outline-primary">Batch Import</a>
+            <a href="{{ url_for('dashboard.alias_export_route') }}"
+               class="btn btn-outline-secondary">Export Aliases</a>
+          </div>
+        </div>
+        <!-- END Alias import/export -->
         <!-- data export -->
         <div class="card">
           <div class="card-body">

View File

@@ -559,7 +559,7 @@
               sender address.
               <br />
               If this option is enabled, the original sender addresses is stored in the email header <b>X-SimpleLogin-Envelope-From</b>
-              and the original From header is stored in <b>X-SimpleLogin-Original-From</b>.
+              and the original From header is stored in <b>X-SimpleLogin-Original-From<b>.
               You can choose to display this header in your email client.
               <br />
               As email headers aren't encrypted, your mailbox service can know the sender address via this header.
@@ -583,23 +583,6 @@
             </form>
           </div>
         </div>
-        <!-- Alias import/export -->
-        <div class="card">
-          <div class="card-body">
-            <div class="card-title">
-              Alias import/export
-            </div>
-            <div class="mb-3">
-              You can import your aliases created on other platforms into SimpleLogin.
-              You can also export your aliases to a readable csv format for a future batch import.
-            </div>
-            <a href="{{ url_for('dashboard.batch_import_route') }}"
-               class="btn btn-outline-primary">Batch Import</a>
-            <a href="{{ url_for('dashboard.alias_export_route') }}"
-               class="btn btn-outline-secondary">Export Aliases</a>
-          </div>
-        </div>
-        <!-- END Alias import/export -->
       </div>
 {% endblock %}
 {% block script %}

View File

@@ -1,142 +0,0 @@
import arrow
import pytest
import cron
from app.db import Session
from app.models import (
Alias,
AppleSubscription,
PlanEnum,
CoinbaseSubscription,
ManualSubscription,
Subscription,
PartnerUser,
PartnerSubscription,
User,
)
from app.proton.utils import get_proton_partner
from tests.utils import create_new_user, random_token
def test_get_alias_for_free_user_has_no_alias():
user = create_new_user()
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_get_alias_for_lifetime_with_null_hibp_date():
user = create_new_user()
user.lifetime = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def test_get_alias_for_lifetime_with_old_hibp_date():
user = create_new_user()
user.lifetime = True
alias = Alias.create_new_random(user)
alias.hibp_last_check = arrow.now().shift(days=-1)
alias_id = alias.id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def create_partner_sub(user: User):
pu = PartnerUser.create(
partner_id=get_proton_partner().id,
partner_email=user.email,
external_user_id=random_token(10),
user_id=user.id,
flush=True,
)
PartnerSubscription.create(
partner_user_id=pu.id, end_at=arrow.utcnow().shift(days=15)
)
sub_generator_list = [
lambda u: AppleSubscription.create(
user_id=u.id,
expires_date=arrow.now().shift(days=15),
original_transaction_id=random_token(10),
receipt_data=random_token(10),
plan=PlanEnum.monthly,
),
lambda u: CoinbaseSubscription.create(
user_id=u.id,
end_at=arrow.now().shift(days=15),
),
lambda u: ManualSubscription.create(
user_id=u.id,
end_at=arrow.now().shift(days=15),
),
lambda u: Subscription.create(
user_id=u.id,
cancel_url="",
update_url="",
subscription_id=random_token(10),
event_time=arrow.now(),
next_bill_date=arrow.now().shift(days=15).date(),
plan=PlanEnum.monthly,
),
create_partner_sub,
]
@pytest.mark.parametrize("sub_generator", sub_generator_list)
def test_get_alias_for_sub(sub_generator):
user = create_new_user()
sub_generator(user)
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def test_disabled_user_is_not_checked():
user = create_new_user()
user.lifetime = True
user.disabled = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_skipped_user_is_not_checked():
user = create_new_user()
user.lifetime = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_already_checked_is_not_checked():
user = create_new_user()
user.lifetime = True
alias = Alias.create_new_random(user)
alias.hibp_last_check = arrow.now().shift(days=1)
alias_id = alias.id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
)
assert len(aliases) == 0

View File

@@ -1,35 +0,0 @@
import tempfile
from io import BytesIO
import arrow
from app import s3, config
from app.models import File, BatchImport
from tasks.cleanup_old_imports import cleanup_old_imports
from tests.utils import random_token, create_new_user
def test_cleanup_old_imports():
BatchImport.filter().delete()
with tempfile.TemporaryDirectory() as tmpdir:
config.UPLOAD_DIR = tmpdir
user = create_new_user()
path = random_token()
s3.upload_from_bytesio(path, BytesIO("data".encode("utf-8")))
file = File.create(path=path, commit=True) # noqa: F821
now = arrow.now()
delete_batch_import_id = BatchImport.create(
user_id=user.id,
file_id=file.id,
created_at=now.shift(minutes=-1),
flush=True,
).id
keep_batch_import_id = BatchImport.create(
user_id=user.id,
file_id=file.id,
created_at=now.shift(minutes=+1),
commit=True,
).id
cleanup_old_imports(now)
assert BatchImport.get(id=delete_batch_import_id) is None
assert BatchImport.get(id=keep_batch_import_id) is not None

View File

@@ -1,72 +0,0 @@
import arrow
from app import config
from app.models import Job, JobState
from tasks.cleanup_old_jobs import cleanup_old_jobs
def test_cleanup_old_jobs():
Job.filter().delete()
now = arrow.now()
delete_ids = [
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.done.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.error.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS,
name="",
payload="",
flush=True,
).id,
]
keep_ids = [
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.done.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.error.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS - 1,
name="",
payload="",
flush=True,
).id,
]
cleanup_old_jobs(now)
for delete_id in delete_ids:
assert Job.get(id=delete_id) is None
for keep_id in keep_ids:
assert Job.get(id=keep_id) is not None

View File

@@ -1,26 +0,0 @@
import arrow
from app.models import Notification
from tasks.cleanup_old_notifications import cleanup_old_notifications
from tests.utils import create_new_user
def test_cleanup_old_notifications():
Notification.filter().delete()
user = create_new_user()
now = arrow.now()
delete_id = Notification.create(
user_id=user.id,
created_at=now.shift(minutes=-1),
message="",
flush=True,
).id
keep_id = Notification.create(
user_id=user.id,
created_at=now.shift(minutes=+1),
message="",
flush=True,
).id
cleanup_old_notifications(now)
assert Notification.get(id=delete_id) is None
assert Notification.get(id=keep_id) is not None