Compare commits

...

6 Commits

SHA1 Message Date
f5bce7d7ff 4.39.2 2024-02-23 12:00:07 +00:00
75f45d9365 4.39.1 2024-02-20 12:00:07 +00:00
ead425e0c2 4.38.3 2024-02-14 12:00:07 +00:00
6c910d62c5 4.38.2 2024-02-06 12:00:07 +00:00
99ffd1ec0c 4.38.0 2024-02-03 16:55:23 +00:00
eda940f8b2 4.37.2 2024-01-27 12:00:07 +00:00
25 changed files with 398 additions and 129 deletions

View File

@@ -511,6 +511,7 @@ server {
     location / {
         proxy_pass http://localhost:7777;
+        proxy_set_header Host $host;
     }
 }
 ```

View File

@@ -168,6 +168,8 @@ class NewUserStrategy(ClientMergeStrategy):
 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
+        # IF it was scheduled to be deleted. Unschedule it.
+        self.user.delete_on = None
         partner_user = ensure_partner_user_exists_for_user(
             self.link_request, self.user, self.partner
         )
@@ -246,6 +248,8 @@ def link_user(
 ) -> LinkResult:
     # Sanitize email just in case
     link_request.email = sanitize_email(link_request.email)
+    # If it was scheduled to be deleted. Unschedule it.
+    current_user.delete_on = None
     partner_user = ensure_partner_user_exists_for_user(
         link_request, current_user, partner
     )

View File

@@ -214,6 +214,20 @@ class UserAdmin(SLModelView):
             Session.commit()
+    @action(
+        "remove trial",
+        "Stop trial period",
+        "Remove trial for this user?",
+    )
+    def stop_trial(self, ids):
+        for user in User.filter(User.id.in_(ids)):
+            user.trial_end = None
+            flash(f"Stopped trial for {user}", "success")
+            AdminAuditLog.stop_trial(current_user.id, user.id)
+            Session.commit()
     @action(
         "disable_otp_fido",
         "Disable OTP & FIDO",

View File

@@ -33,6 +33,9 @@ def authorize_request() -> Optional[Tuple[str, int]]:
     if g.user.disabled:
         return jsonify(error="Disabled account"), 403
+    if not g.user.is_active():
+        return jsonify(error="Account does not exist"), 401
     g.api_key = api_key
     return None

View File

@@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
     q = q.order_by(Alias.pinned.desc())
     q = q.order_by(latest_activity.desc())
-    q = list(q.limit(page_limit).offset(page_id * page_size))
+    q = q.limit(page_limit).offset(page_id * page_size)
     ret = []
-    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
+    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
         ret.append(
             AliasInfo(
                 alias=alias,
@@ -358,7 +358,6 @@ def construct_alias_query(user: User):
                     else_=0,
                 )
             ).label("nb_forward"),
-            func.max(EmailLog.created_at).label("latest_email_log_created_at"),
         )
         .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
         .filter(Alias.user_id == user.id)
@@ -366,14 +365,6 @@ def construct_alias_query(user: User):
         .subquery()
     )
-    alias_contact_subquery = (
-        Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .filter(Alias.user_id == user.id)
-        .group_by(Alias.id)
-        .subquery()
-    )
     return (
         Session.query(
             Alias,
@@ -385,23 +376,7 @@ def construct_alias_query(user: User):
         )
         .options(joinedload(Alias.hibp_breaches))
         .options(joinedload(Alias.custom_domain))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
+        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
+        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
         .filter(Alias.id == alias_activity_subquery.c.id)
-        .filter(Alias.id == alias_contact_subquery.c.id)
-        .filter(
-            or_(
-                EmailLog.created_at
-                == alias_activity_subquery.c.latest_email_log_created_at,
-                and_(
-                    # no email log yet for this alias
-                    alias_activity_subquery.c.latest_email_log_created_at.is_(None),
-                    # to make sure only 1 contact is returned in this case
-                    or_(
-                        Contact.id == alias_contact_subquery.c.max_contact_id,
-                        alias_contact_subquery.c.max_contact_id.is_(None),
-                    ),
-                ),
-            )
-        )
     )

View File

@@ -17,9 +17,14 @@ from app.models import PlanEnum, AppleSubscription
 _MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
 _YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"
+# SL Mac app used to be in SL account
 _MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
 _MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"
+# SL Mac app is moved to Proton account
+_MACAPP_MONTHLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.monthly"
+_MACAPP_YEARLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.yearly"
 # Apple API URL
 _SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
 _PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
@@ -263,7 +268,11 @@ def apple_update_notification():
     plan = (
         PlanEnum.monthly
         if transaction["product_id"]
-        in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
+        in (
+            _MONTHLY_PRODUCT_ID,
+            _MACAPP_MONTHLY_PRODUCT_ID,
+            _MACAPP_MONTHLY_PRODUCT_ID_NEW,
+        )
         else PlanEnum.yearly
     )
@@ -517,7 +526,11 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
     plan = (
         PlanEnum.monthly
         if latest_transaction["product_id"]
-        in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
+        in (
+            _MONTHLY_PRODUCT_ID,
+            _MACAPP_MONTHLY_PRODUCT_ID,
+            _MACAPP_MONTHLY_PRODUCT_ID_NEW,
+        )
         else PlanEnum.yearly
     )

View File

@@ -492,6 +492,31 @@ NAMESERVERS = setup_nameservers()
 DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = os.environ.get(
     "DISABLE_CREATE_CONTACTS_FOR_FREE_USERS", False
 )
+
+
+# Expect format hits,seconds:hits,seconds...
+# Example 1,10:4,60 means 1 in the last 10 secs or 4 in the last 60 secs
+def getRateLimitFromConfig(
+    env_var: string, default: string = ""
+) -> list[tuple[int, int]]:
+    value = os.environ.get(env_var, default)
+    if not value:
+        return []
+    entries = [entry for entry in value.split(":")]
+    limits = []
+    for entry in entries:
+        fields = entry.split(",")
+        limit = (int(fields[0]), int(fields[1]))
+        limits.append(limit)
+    return limits
+
+
+ALIAS_CREATE_RATE_LIMIT_FREE = getRateLimitFromConfig(
+    "ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600"
+)
+ALIAS_CREATE_RATE_LIMIT_PAID = getRateLimitFromConfig(
+    "ALIAS_CREATE_RATE_LIMIT_PAID", "50,900:200,3600"
+)
 PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
     FLASK_SECRET + "partnerapitoken"
 )
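
To make the rate-limit format concrete, here is a small illustration of what the parser above yields for the default free-plan value. This is my own example, assuming ALIAS_CREATE_RATE_LIMIT_FREE is not set in the environment; it is not part of the diff:

# "10,900:50,3600" parses to [(10, 900), (50, 3600)], i.e. at most
# 10 hits in the last 900 seconds and at most 50 in the last 3600 seconds.
limits = getRateLimitFromConfig("ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600")
assert limits == [(10, 900), (50, 3600)]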

View File

@@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> EmailLog:
     refused_email = RefusedEmail.create(
         full_report_path=s3_report_path, user_id=alias.user_id, flush=True
     )
-    return EmailLog.create(
+    email_log = EmailLog.create(
         user_id=alias.user_id,
         mailbox_id=alias.mailbox_id,
         contact_id=contact.id,
@@ -142,6 +142,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> EmailLog:
         blocked=True,
         commit=True,
     )
+    return email_log
 def apply_dmarc_policy_for_reply_phase(

View File

@@ -27,7 +27,7 @@ from sqlalchemy.orm import deferred
 from sqlalchemy.sql import and_
 from sqlalchemy_utils import ArrowType
-from app import config
+from app import config, rate_limiter
 from app import s3
 from app.db import Session
 from app.dns_utils import get_mx_domains
@@ -235,6 +235,7 @@ class AuditLogActionEnum(EnumE):
     download_provider_complaint = 8
     disable_user = 9
     enable_user = 10
+    stop_trial = 11
 class Phase(EnumE):
@@ -726,6 +727,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         return True
+    def is_active(self) -> bool:
+        if self.delete_on is None:
+            return True
+        return self.delete_on < arrow.now()
     def in_trial(self):
         """return True if user does not have lifetime licence or an active subscription AND is in trial period"""
         if self.lifetime_or_active_subscription():
@@ -827,6 +833,9 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         Whether user can create a new alias. User can't create a new alias if
         - has more than 15 aliases in the free plan, *even in the free trial*
         """
+        if not self.is_active():
+            return False
         if self.disabled:
             return False
@@ -907,7 +916,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         return sub
     def verified_custom_domains(self) -> List["CustomDomain"]:
-        return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all()
+        return (
+            CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
+            .order_by(CustomDomain.domain.asc())
+            .all()
+        )
     def mailboxes(self) -> List["Mailbox"]:
         """list of mailbox that user own"""
@@ -1495,6 +1508,8 @@ class Alias(Base, ModelMixin):
         TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
     )
+    last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
     __table_args__ = (
         Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
         # index on note column using pg_trgm
@@ -1563,6 +1578,15 @@ class Alias(Base, ModelMixin):
         flush = kw.pop("flush", False)
         new_alias = cls(**kw)
+        user = User.get(new_alias.user_id)
+        if user.is_premium():
+            limits = config.ALIAS_CREATE_RATE_LIMIT_PAID
+        else:
+            limits = config.ALIAS_CREATE_RATE_LIMIT_FREE
+        # limits is array of (hits,days)
+        for limit in limits:
+            key = f"alias_create_{limit[1]}d:{user.id}"
+            rate_limiter.check_bucket_limit(key, limit[0], limit[1])
         email = kw["email"]
         # make sure email is lowercase and doesn't have any whitespace
@@ -2045,6 +2069,20 @@ class EmailLog(Base, ModelMixin):
     def get_dashboard_url(self):
         return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"
+    @classmethod
+    def create(cls, *args, **kwargs):
+        commit = kwargs.pop("commit", False)
+        email_log = super().create(*args, **kwargs)
+        Session.flush()
+        if "alias_id" in kwargs:
+            sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
+            Session.execute(
+                sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
+            )
+        if commit:
+            Session.commit()
+        return email_log
     def __repr__(self):
         return f"<EmailLog {self.id}>"
@@ -3330,6 +3368,15 @@ class AdminAuditLog(Base):
             },
         )
+    @classmethod
+    def stop_trial(cls, admin_user_id: int, user_id: int):
+        cls.create(
+            admin_user_id=admin_user_id,
+            action=AuditLogActionEnum.stop_trial.value,
+            model="User",
+            model_id=user_id,
+        )
     @classmethod
     def disable_otp_fido(
         cls, admin_user_id: int, user_id: int, had_otp: bool, had_fido: bool

View File

@@ -140,7 +140,7 @@ def authorize():
             Scope=Scope,
         )
     else:  # POST - user allows or denies
-        if not current_user.is_authenticated or not current_user.is_active:
+        if not current_user.is_authenticated or not current_user.is_active():
             LOG.i(
                 "Attempt to validate a OAUth allow request by an unauthenticated user"
             )

app/app/rate_limiter.py (new file, 40 lines)
View File

@@ -0,0 +1,40 @@
+from datetime import datetime
+from typing import Optional
+
+import newrelic.agent
+import redis.exceptions
+import werkzeug.exceptions
+from limits.storage import RedisStorage
+
+from app.log import LOG
+
+lock_redis: Optional[RedisStorage] = None
+
+
+def set_redis_concurrent_lock(redis: RedisStorage):
+    global lock_redis
+    lock_redis = redis
+
+
+def check_bucket_limit(
+    lock_name: Optional[str] = None,
+    max_hits: int = 5,
+    bucket_seconds: int = 3600,
+):
+    # Calculate current bucket time
+    int_time = int(datetime.utcnow().timestamp())
+    bucket_id = int_time - (int_time % bucket_seconds)
+    bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
+    if not lock_redis:
+        return
+    try:
+        value = lock_redis.incr(bucket_lock_name, bucket_seconds)
+        if value > max_hits:
+            LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
+            newrelic.agent.record_custom_event(
+                "BucketRateLimit",
+                {"lock_name": lock_name, "bucket_seconds": bucket_seconds},
+            )
+            raise werkzeug.exceptions.TooManyRequests()
+    except (redis.exceptions.RedisError, AttributeError):
+        LOG.e("Cannot connect to redis")
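
For orientation, a minimal usage sketch of the new module, mirroring how the Alias.create change in app/models.py calls it. The key and the numbers below are illustrative, not taken from the diff:

from app import rate_limiter

# Counts one hit in the current 3600-second bucket for this key and raises
# werkzeug.exceptions.TooManyRequests once the bucket exceeds 50 hits.
# It is a no-op until set_redis_concurrent_lock() has been given a RedisStorage.
rate_limiter.check_bucket_limit("alias_create_3600d:42", max_hits=50, bucket_seconds=3600)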

View File

@@ -2,6 +2,7 @@ import flask
 import limits.storage
 from app.parallel_limiter import set_redis_concurrent_lock
+from app.rate_limiter import set_redis_concurrent_lock as rate_limit_set_redis
 from app.session import RedisSessionStore
@@ -10,12 +11,14 @@ def initialize_redis_services(app: flask.Flask, redis_url: str):
         storage = limits.storage.RedisStorage(redis_url)
         app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
         set_redis_concurrent_lock(storage)
+        rate_limit_set_redis(storage)
     elif redis_url.startswith("redis+sentinel://"):
         storage = limits.storage.RedisSentinelStorage(redis_url)
         app.session_interface = RedisSessionStore(
             storage.storage, storage.storage_slave, app
         )
         set_redis_concurrent_lock(storage)
+        rate_limit_set_redis(storage)
     else:
         raise RuntimeError(
             f"Tried to set_redis_session with an invalid redis url: ${redis_url}"

View File

@@ -62,6 +62,8 @@ from app.proton.utils import get_proton_partner
 from app.utils import sanitize_email
 from server import create_light_app
+DELETE_GRACE_DAYS = 30
 def notify_trial_end():
     for user in User.filter(
@@ -1126,14 +1128,19 @@ def notify_hibp():
     Session.commit()
-def clear_users_scheduled_to_be_deleted():
+def clear_users_scheduled_to_be_deleted(dry_run=False):
     users = User.filter(
-        and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
+        and_(
+            User.delete_on.isnot(None),
+            User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
+        )
     ).all()
     for user in users:
         LOG.i(
             f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
         )
+        if dry_run:
+            continue
         User.delete(user.id)
         Session.commit()
@@ -1206,4 +1213,4 @@ if __name__ == "__main__":
         load_unsent_mails_from_fs_and_resend()
     elif args.job == "delete_scheduled_users":
         LOG.d("Deleting users scheduled to be deleted")
-        clear_users_scheduled_to_be_deleted()
+        clear_users_scheduled_to_be_deleted(dry_run=True)

View File

@@ -62,7 +62,7 @@ jobs:
     captureStderr: true
   - name: SimpleLogin delete users scheduled to be deleted
-    command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
+    command: python /code/cron.py -j delete_scheduled_users
     shell: /bin/bash
     schedule: "15 11 * * *"
     captureStderr: true

View File

@@ -636,6 +636,10 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str]]:
     user = alias.user
+    if not user.is_active():
+        LOG.w(f"User {user} has been soft deleted")
+        return False, status.E502
     if not user.can_send_or_receive():
         LOG.i(f"User {user} cannot receive emails")
         if should_ignore_bounce(envelope.mail_from):
@@ -1055,6 +1059,9 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
     if not contact:
         LOG.w(f"No contact with {reply_email} as reverse alias")
         return False, status.E502
+    if not contact.user.is_active():
+        LOG.w(f"User {contact.user} has been soft deleted")
+        return False, status.E502
     alias = contact.alias
     alias_address: str = contact.alias.email
@@ -1921,6 +1928,9 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
         contact,
         alias,
     )
+    if not email_log.user.is_active():
+        LOG.d(f"User {email_log.user} is not active")
+        return status.E510
     if email_log.is_reply:
         content_type = msg.get_content_type().lower()
@@ -1982,6 +1992,9 @@ def send_no_reply_response(mail_from: str, msg: Message):
     if not mailbox:
         LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
         return
+    if not mailbox.user.is_active():
+        LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
+        return
     send_email_at_most_times(
         mailbox.user,
         ALERT_TO_NOREPLY,

View File

@@ -7460,9 +7460,7 @@ villain
 vindicate
 vineyard
 vintage
-violate
 violation
-violator
 violet
 violin
 viper

View File

@@ -0,0 +1,29 @@
+"""empty message
+
+Revision ID: 818b0a956205
+Revises: 4bc54632d9aa
+Create Date: 2024-02-01 10:43:46.253184
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '818b0a956205'
+down_revision = '4bc54632d9aa'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('alias', 'last_email_log_id')
+    # ### end Alembic commands ###

View File

@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+import argparse
+import time
+
+from sqlalchemy import func
+
+from app.models import Alias
+from app.db import Session
+
+parser = argparse.ArgumentParser(
+    prog="Backfill alias", description="Backfill alias las use"
+)
+parser.add_argument(
+    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
+)
+parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
+
+args = parser.parse_args()
+alias_id_start = args.start_alias_id
+max_alias_id = args.end_alias_id
+if max_alias_id == 0:
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+
+print(f"Checking alias {alias_id_start} to {max_alias_id}")
+step = 1000
+el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
+alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
+updated = 0
+start_time = time.time()
+for batch_start in range(alias_id_start, max_alias_id, step):
+    rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
+    for row in rows:
+        Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
+        Session.commit()
+        updated += 1
+    elapsed = time.time() - start_time
+    time_per_alias = elapsed / (updated + 1)
+    last_batch_id = batch_start + step
+    remaining = max_alias_id - last_batch_id
+    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
+    hours_remaining = time_remaining / 3600.0
+    print(
+        f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
+    )
+print("")

View File

@@ -228,6 +228,8 @@ def load_user(alternative_id):
     sentry_sdk.set_user({"email": user.email, "id": user.id})
     if user.disabled:
         return None
+    if not user.is_active():
+        return None
     return user

View File

@@ -9,7 +9,7 @@
 <a href='https://simplelogin.io/' aria-label="SimpleLogin">
   <img src="/static/logo-white.svg"
        height="30px"
-       class="mt-1 mb-3"
+       class="mt-3 mb-3"
        alt="SimpleLogin logo">
 </a>
 <!-- End Logo -->

View File

@@ -89,6 +89,7 @@
     Github repo
     <i class="fa fa-external-link" aria-hidden="true"></i>
   </a>
+</div>
 <div class="dropdown-item">
   <a href="https://forum.simplelogin.io"
      target="_blank"
@@ -161,8 +162,8 @@
</a>
</div>
</div>
</div>
<div class="header collapse d-lg-flex p-0" id="headerMenuCollapse"> <div class="container">
<div class="container"> <div class="row align-items-center">
<div class="row align-items-center"> <div class="col-lg order-lg-first">
<div class="col-lg order-lg-first">
@@ -171,4 +172,4 @@
</div>
</div>
</div>
</div>

View File

@@ -40,14 +40,16 @@ def test_get_notifications(flask_client):
 def test_mark_notification_as_read(flask_client):
     user, api_key = get_new_user_and_api_key()
-    Notification.create(id=1, user_id=user.id, message="Test message 1")
+    notif_id = Notification.create(
+        user_id=user.id, message="Test message 1", flush=True
+    ).id
     Session.commit()
     r = flask_client.post(
-        url_for("api.mark_as_read", notification_id=1),
+        url_for("api.mark_as_read", notification_id=notif_id),
         headers={"Authentication": api_key.code},
     )
     assert r.status_code == 200
-    notification = Notification.first()
+    notification = Notification.filter_by(id=notif_id).first()
     assert notification.read

View File

@@ -1,8 +1,8 @@
 from app.api.serializer import get_alias_infos_with_pagination_v3
 from app.config import PAGE_LIMIT
 from app.db import Session
-from app.models import Alias, Mailbox, Contact
-from tests.utils import create_new_user
+from app.models import Alias, Mailbox, Contact, EmailLog
+from tests.utils import create_new_user, random_email
 def test_get_alias_infos_with_pagination_v3(flask_client):
@@ -155,3 +155,46 @@ def test_get_alias_infos_pinned_alias(flask_client):
     # pinned alias isn't included in the search
     alias_infos = get_alias_infos_with_pagination_v3(user, query="no match")
     assert len(alias_infos) == 0
+
+
+def test_get_alias_infos_with_no_last_email_log(flask_client):
+    user = create_new_user()
+    alias_infos = get_alias_infos_with_pagination_v3(user)
+    assert len(alias_infos) == 1
+    row = alias_infos[0]
+    assert row.alias.id == user.newsletter_alias_id
+    assert row.latest_contact is None
+    assert row.latest_email_log is None
+
+
+def test_get_alias_infos_with_email_log_no_contact():
+    user = create_new_user()
+    contact = Contact.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        website_email="a@a.com",
+        reply_email=random_email(),
+        flush=True,
+    )
+    Contact.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        website_email="unused@a.com",
+        reply_email=random_email(),
+        flush=True,
+    )
+    EmailLog.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        contact_id=contact.id,
+        commit=True,
+    )
+    alias_infos = get_alias_infos_with_pagination_v3(user)
+    assert len(alias_infos) == 1
+    row = alias_infos[0]
+    assert row.alias.id == user.newsletter_alias_id
+    assert row.latest_contact is not None
+    assert row.latest_contact.id == contact.id
+    assert row.latest_email_log is not None
+    alias = Alias.get(id=user.newsletter_alias_id)
+    assert row.latest_email_log.id == alias.last_email_log_id

View File

@@ -39,15 +39,17 @@ def test_cleanup_tokens(flask_client):
 def test_cleanup_users():
     u_delete_none_id = create_new_user().id
-    u_delete_after = create_new_user()
-    u_delete_after_id = u_delete_after.id
-    u_delete_before = create_new_user()
-    u_delete_before_id = u_delete_before.id
+    u_delete_grace_has_expired = create_new_user()
+    u_delete_grace_has_expired_id = u_delete_grace_has_expired.id
+    u_delete_grace_has_not_expired = create_new_user()
+    u_delete_grace_has_not_expired_id = u_delete_grace_has_not_expired.id
     now = arrow.now()
-    u_delete_after.delete_on = now.shift(minutes=1)
-    u_delete_before.delete_on = now.shift(minutes=-1)
+    u_delete_grace_has_expired.delete_on = now.shift(days=-(cron.DELETE_GRACE_DAYS + 1))
+    u_delete_grace_has_not_expired.delete_on = now.shift(
+        days=-(cron.DELETE_GRACE_DAYS - 1)
+    )
     Session.flush()
     cron.clear_users_scheduled_to_be_deleted()
     assert User.get(u_delete_none_id) is not None
-    assert User.get(u_delete_after_id) is not None
+    assert User.get(u_delete_grace_has_not_expired_id) is not None
-    assert User.get(u_delete_before_id) is None
+    assert User.get(u_delete_grace_has_expired_id) is None

View File

@@ -57,6 +57,7 @@ from tests.utils import (
     login,
     load_eml_file,
     create_new_user,
+    random_email,
     random_domain,
     random_token,
 )
@@ -186,13 +187,14 @@ def test_parse_full_address():
 def test_send_email_with_rate_control(flask_client):
     user = create_new_user()
+    email = random_email()
     for _ in range(MAX_ALERT_24H):
         assert send_email_with_rate_control(
-            user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
+            user, "test alert type", email, "subject", "plaintext"
         )
     assert not send_email_with_rate_control(
-        user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
+        user, "test alert type", email, "subject", "plaintext"
     )