4.39.1
commit 75f45d9365 (parent ead425e0c2)
@@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
         q = q.order_by(Alias.pinned.desc())
         q = q.order_by(latest_activity.desc())
 
-    q = list(q.limit(page_limit).offset(page_id * page_size))
+    q = q.limit(page_limit).offset(page_id * page_size)
 
     ret = []
-    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
+    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
         ret.append(
             AliasInfo(
                 alias=alias,
@@ -358,7 +358,6 @@ def construct_alias_query(user: User):
                     else_=0,
                 )
             ).label("nb_forward"),
-            func.max(EmailLog.created_at).label("latest_email_log_created_at"),
         )
         .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
         .filter(Alias.user_id == user.id)
@@ -366,14 +365,6 @@ def construct_alias_query(user: User):
         .subquery()
     )
 
-    alias_contact_subquery = (
-        Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .filter(Alias.user_id == user.id)
-        .group_by(Alias.id)
-        .subquery()
-    )
-
     return (
         Session.query(
             Alias,
@@ -385,23 +376,7 @@ def construct_alias_query(user: User):
         )
         .options(joinedload(Alias.hibp_breaches))
         .options(joinedload(Alias.custom_domain))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
+        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
+        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
         .filter(Alias.id == alias_activity_subquery.c.id)
-        .filter(Alias.id == alias_contact_subquery.c.id)
-        .filter(
-            or_(
-                EmailLog.created_at
-                == alias_activity_subquery.c.latest_email_log_created_at,
-                and_(
-                    # no email log yet for this alias
-                    alias_activity_subquery.c.latest_email_log_created_at.is_(None),
-                    # to make sure only 1 contact is returned in this case
-                    or_(
-                        Contact.id == alias_contact_subquery.c.max_contact_id,
-                        alias_contact_subquery.c.max_contact_id.is_(None),
-                    ),
-                ),
-            )
-        )
     )
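Note: with Alias.last_email_log_id populated, the newest email log and its contact can be reached through two plain outer joins, which is why the alias_contact_subquery correlation above can be dropped. A minimal sketch of the resulting join shape, reusing only names that appear in these hunks:

    # Sketch only: latest email log / contact per alias via the denormalised pointer.
    latest_rows = (
        Session.query(Alias, Contact, EmailLog)
        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
        .filter(Alias.user_id == user.id)
    )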
@@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> EmailLog
     refused_email = RefusedEmail.create(
         full_report_path=s3_report_path, user_id=alias.user_id, flush=True
     )
-    return EmailLog.create(
+    email_log = EmailLog.create(
         user_id=alias.user_id,
         mailbox_id=alias.mailbox_id,
         contact_id=contact.id,
@@ -142,6 +142,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> EmailLog
         blocked=True,
         commit=True,
     )
+    return email_log
 
 
 def apply_dmarc_policy_for_reply_phase(
@@ -1500,6 +1500,8 @@ class Alias(Base, ModelMixin):
         TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
     )
 
+    last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
+
     __table_args__ = (
         Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
         # index on note column using pg_trgm
@@ -2059,6 +2061,20 @@ class EmailLog(Base, ModelMixin):
     def get_dashboard_url(self):
         return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"
 
+    @classmethod
+    def create(cls, *args, **kwargs):
+        commit = kwargs.pop("commit", False)
+        email_log = super().create(*args, **kwargs)
+        Session.flush()
+        if "alias_id" in kwargs:
+            sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
+            Session.execute(
+                sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
+            )
+        if commit:
+            Session.commit()
+        return email_log
+
     def __repr__(self):
         return f"<EmailLog {self.id}>"
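Note: with this override, any caller that passes alias_id keeps the denormalised alias.last_email_log_id pointer up to date automatically. A rough usage sketch (the user/alias/contact objects are illustrative, not part of the diff):

    # Creating a log entry also moves the alias's last_email_log_id pointer.
    email_log = EmailLog.create(
        user_id=user.id,
        alias_id=alias.id,
        contact_id=contact.id,
        commit=True,
    )
    assert Alias.get(id=alias.id).last_email_log_id == email_log.id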
@@ -6,7 +6,7 @@ import redis.exceptions
 import werkzeug.exceptions
 from limits.storage import RedisStorage
 
-from app.log import log
+from app.log import LOG
 
 lock_redis: Optional[RedisStorage] = None
 
@@ -22,17 +22,19 @@ def check_bucket_limit(
     bucket_seconds: int = 3600,
 ):
     # Calculate current bucket time
-    bucket_id = int(datetime.utcnow().timestamp()) % bucket_seconds
+    int_time = int(datetime.utcnow().timestamp())
+    bucket_id = int_time - (int_time % bucket_seconds)
     bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
     if not lock_redis:
         return
     try:
         value = lock_redis.incr(bucket_lock_name, bucket_seconds)
         if value > max_hits:
+            LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
             newrelic.agent.record_custom_event(
                 "BucketRateLimit",
                 {"lock_name": lock_name, "bucket_seconds": bucket_seconds},
             )
             raise werkzeug.exceptions.TooManyRequests()
     except (redis.exceptions.RedisError, AttributeError):
-        log.e("Cannot connect to redis")
+        LOG.e("Cannot connect to redis")
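Note: the old expression int(datetime.utcnow().timestamp()) % bucket_seconds gave the offset inside the current bucket, so the Redis key bl:{lock_name}:{bucket_id} changed almost every second and the counter could never accumulate; the new form rounds the timestamp down to the bucket's start, producing one stable key per window. A small worked example with an arbitrary timestamp:

    bucket_seconds = 3600
    int_time = 1706788000  # some epoch second

    old_bucket_id = int_time % bucket_seconds               # 2800, different a second later
    new_bucket_id = int_time - (int_time % bucket_seconds)  # 1706785200, same for the whole hour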
app/migrations/versions/2024_020110_818b0a956205_.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+"""empty message
+
+Revision ID: 818b0a956205
+Revises: 4bc54632d9aa
+Create Date: 2024-02-01 10:43:46.253184
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '818b0a956205'
+down_revision = '4bc54632d9aa'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('alias', 'last_email_log_id')
+    # ### end Alembic commands ###
app/oneshot/backfill_email_log.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+import argparse
+import time
+
+from sqlalchemy import func
+from app.models import Alias
+from app.db import Session
+
+parser = argparse.ArgumentParser(
+    prog="Backfill alias", description="Backfill alias las use"
+)
+parser.add_argument(
+    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
+)
+parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
+
+args = parser.parse_args()
+alias_id_start = args.start_alias_id
+max_alias_id = args.end_alias_id
+if max_alias_id == 0:
+    max_alias_id = Session.query(func.max(Alias.id)).scalar()
+
+print(f"Checking alias {alias_id_start} to {max_alias_id}")
+step = 1000
+el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
+alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
+updated = 0
+start_time = time.time()
+for batch_start in range(alias_id_start, max_alias_id, step):
+    rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
+    for row in rows:
+        Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
+        Session.commit()
+        updated += 1
+    elapsed = time.time() - start_time
+    time_per_alias = elapsed / (updated + 1)
+    last_batch_id = batch_start + step
+    remaining = max_alias_id - last_batch_id
+    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
+    hours_remaining = time_remaining / 3600.0
+    print(
+        f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
+    )
+print("")
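Note: existing aliases keep last_email_log_id = NULL until this one-shot script is run (the outer joins in the serializer tolerate that), so the backfill can be executed after deploying. A rough invocation, with an assumed working directory and an illustrative id range:

    python3 oneshot/backfill_email_log.py --start_alias_id 0 --end_alias_id 100000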
@@ -40,14 +40,16 @@ def test_get_notifications(flask_client):
 def test_mark_notification_as_read(flask_client):
     user, api_key = get_new_user_and_api_key()
 
-    Notification.create(id=1, user_id=user.id, message="Test message 1")
+    notif_id = Notification.create(
+        user_id=user.id, message="Test message 1", flush=True
+    ).id
     Session.commit()
 
     r = flask_client.post(
-        url_for("api.mark_as_read", notification_id=1),
+        url_for("api.mark_as_read", notification_id=notif_id),
         headers={"Authentication": api_key.code},
     )
 
     assert r.status_code == 200
-    notification = Notification.first()
+    notification = Notification.filter_by(id=notif_id).first()
     assert notification.read
@@ -1,8 +1,8 @@
 from app.api.serializer import get_alias_infos_with_pagination_v3
 from app.config import PAGE_LIMIT
 from app.db import Session
-from app.models import Alias, Mailbox, Contact
-from tests.utils import create_new_user
+from app.models import Alias, Mailbox, Contact, EmailLog
+from tests.utils import create_new_user, random_email
 
 
 def test_get_alias_infos_with_pagination_v3(flask_client):
@@ -155,3 +155,46 @@ def test_get_alias_infos_pinned_alias(flask_client):
     # pinned alias isn't included in the search
     alias_infos = get_alias_infos_with_pagination_v3(user, query="no match")
     assert len(alias_infos) == 0
+
+
+def test_get_alias_infos_with_no_last_email_log(flask_client):
+    user = create_new_user()
+    alias_infos = get_alias_infos_with_pagination_v3(user)
+    assert len(alias_infos) == 1
+    row = alias_infos[0]
+    assert row.alias.id == user.newsletter_alias_id
+    assert row.latest_contact is None
+    assert row.latest_email_log is None
+
+
+def test_get_alias_infos_with_email_log_no_contact():
+    user = create_new_user()
+    contact = Contact.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        website_email="a@a.com",
+        reply_email=random_email(),
+        flush=True,
+    )
+    Contact.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        website_email="unused@a.com",
+        reply_email=random_email(),
+        flush=True,
+    )
+    EmailLog.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        contact_id=contact.id,
+        commit=True,
+    )
+    alias_infos = get_alias_infos_with_pagination_v3(user)
+    assert len(alias_infos) == 1
+    row = alias_infos[0]
+    assert row.alias.id == user.newsletter_alias_id
+    assert row.latest_contact is not None
+    assert row.latest_contact.id == contact.id
+    assert row.latest_email_log is not None
+    alias = Alias.get(id=user.newsletter_alias_id)
+    assert row.latest_email_log.id == alias.last_email_log_id
@@ -57,6 +57,7 @@ from tests.utils import (
     login,
     load_eml_file,
     create_new_user,
+    random_email,
     random_domain,
     random_token,
 )
@@ -186,13 +187,14 @@ def test_parse_full_address():
 
 def test_send_email_with_rate_control(flask_client):
     user = create_new_user()
+    email = random_email()
 
     for _ in range(MAX_ALERT_24H):
         assert send_email_with_rate_control(
-            user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
+            user, "test alert type", email, "subject", "plaintext"
         )
     assert not send_email_with_rate_control(
-        user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
+        user, "test alert type", email, "subject", "plaintext"
     )
 
 