Compare commits

..

58 Commits

SHA1        Message                       Date                        (CI status: continuous-integration/drone/tag)
a505186051  Remove Drone                  2024-03-04 13:38:57 +00:00
8fcca8571a  Add Gitea Actions             2024-03-04 13:38:52 +00:00
757f153042  4.39.2 [CI: passing]          2024-02-23 12:00:07 +00:00
a9f65bed60  4.39.1 [CI: passing]          2024-02-20 12:00:07 +00:00
a8ca607581  4.38.3 [CI: passing]          2024-02-14 12:00:07 +00:00
5b47bd1654  4.38.2 [CI: passing]          2024-02-06 12:00:07 +00:00
e9faf93878  4.38.0 [CI: passing]          2024-02-03 16:55:23 +00:00
0f60f7cec9  4.37.2 [CI: passing]          2024-01-27 12:00:07 +00:00
3180034ff8  4.37.1 [CI: passing]          2024-01-25 12:00:08 +00:00
b3ee67213d  4.37.0 [CI: passing]          2024-01-18 12:00:07 +00:00
aeb34f8582  4.36.8 [CI: passing]          2023-12-28 12:00:07 +00:00
2372b8f50f  4.36.7 [CI: passing]          2023-12-21 12:00:09 +00:00
f3050b2ca0  4.36.6 [CI: passing]          2023-12-17 14:56:57 +00:00
ebe941c8a5  4.36.5 [CI: passing]          2023-11-30 12:00:09 +00:00
651b2dd52a  4.36.4                        2023-11-22 12:00:09 +00:00
1c580cb6f7  4.36.3                        2023-11-08 12:00:06 +00:00
21765ae9d8  4.35.6                        2023-11-07 12:00:06 +00:00
d661a52f43  4.35.3 [CI: passing]          2023-10-05 12:00:06 +01:00
45528ff81d  4.35.2 [CI: passing]          2023-10-03 12:00:06 +01:00
6170fbf127  4.35.1 [CI: passing]          2023-10-02 12:00:06 +01:00
c8ab76066b  4.35.0 [CI: passing]          2023-09-29 12:00:06 +01:00
357d34a42b  4.34.4 [CI: passing]          2023-08-31 12:00:06 +01:00
246754872d  4.34.3 [CI: passing]          2023-08-29 20:20:00 +01:00
df59d73d66  4.34.2 [CI: passing]          2023-08-25 12:00:05 +01:00
ff6d78f255  4.34.1 [CI: passing]          2023-08-09 12:00:05 +01:00
d59fa5fe1c  Update .drone.yml             2023-08-06 17:56:31 +00:00
de1fe02200  4.33.3 [CI: passing]          2023-08-06 17:51:04 +01:00
439bfc5efd  Update README.md              2023-08-06 16:04:57 +00:00
0a8a420850  Update README.md              2023-08-06 16:04:47 +00:00
d119e74c2f  Update README.md              2023-08-06 16:04:41 +00:00
b5485429ef  Remove provenance [CI SKIP]   2023-08-06 16:01:04 +00:00
f3a7900cbd  4.32.4 [CI: passing]          2023-08-02 16:49:54 +01:00
0f91161ff3  4.32.1 [CI: passing]          2023-07-12 11:00:04 +00:00
1da9a3f828  4.32.0 [CI: passing]          2023-07-11 11:00:05 +00:00
167e56bc95  4.31.0 [CI: failing]          2023-06-30 11:00:06 +00:00
c5a0d2d513  4.30.1 [CI: failing]          2023-06-28 11:00:03 +00:00
25ebbaa7fd  4.30.0 [CI: failing]          2023-06-27 11:00:04 +00:00
067d94841e  4.29.4 [CI: passing]          2023-06-07 11:00:05 +00:00
804eec0c03  4.29.3 [CI: passing]          2023-06-01 11:00:05 +00:00
651f3f1e9c  4.28.2 [CI: passing]          2023-05-16 11:00:09 +00:00
fd988d6ef0  4.28.1 [CI: passing]          2023-05-10 11:00:05 +00:00
da4a8cc979  4.27.0 [CI: passing]          2023-04-25 11:00:05 +00:00
299da46abe  4.26.1 [CI: passing]          2023-04-20 11:00:06 +00:00
1ad8294ec3  4.25.1 [CI: passing]          2023-04-15 11:00:05 +00:00
f5de4a9624  4.24.0 [CI: passing]          2023-04-11 11:00:05 +00:00
5501b033e8  4.23.0 [CI: passing]          2023-03-24 12:00:07 +00:00
32a4b865ef  4.22.5 [CI: passing]          2023-03-14 12:00:06 +00:00
5d5a23dd63  Update '.drone.yml'           2023-03-08 18:32:53 +00:00
03053d0e54  4.22.4 [CI: running]          2023-03-08 12:00:06 +00:00
4d70590d05  4.22.3 [CI: passing]          2023-03-01 12:00:06 +00:00
bc879c10ae  4.22.2 [CI: passing]          2023-02-16 12:00:05 +00:00
c6b237a004  Update 'README.md'            2023-02-10 13:00:46 +00:00
722979fe19  Update 'README.md'            2023-01-27 16:29:12 +00:00
b63ada023d  Update '.drone.yml'           2023-01-27 16:26:22 +00:00
8b4e4e3a2b  4.22.0 [CI: passing]          2023-01-17 12:00:04 +00:00
32465d1220  4.21.3 [CI: passing]          2022-12-30 16:47:07 +00:00
98bae4c86c  4.21.3                        2022-12-30 16:23:27 +00:00
7ff6cf2451  add drone                     2022-12-30 15:35:10 +00:00
20 changed files with 406 additions and 108 deletions

View File

@@ -1,52 +0,0 @@
kind: pipeline
type: docker
name: build-multiarch-images

platform:
  os: linux
  arch: amd64

steps:
  - name: make-tags
    image: node
    commands:
      - echo -n "${DRONE_TAG}, latest" > .tags
  - name: build
    image: thegeeklab/drone-docker-buildx
    privileged: true
    settings:
      provenance: false
      dockerfile: app/Dockerfile
      context: app
      registry: git.mrmeeb.stream
      username:
        from_secret: docker_username
      password:
        from_secret: docker_password
      repo: git.mrmeeb.stream/mrmeeb/simple-login
      platforms:
        - linux/arm64
        - linux/amd64
  - name: notify
    image: plugins/slack
    when:
      status:
        - success
        - failure
        - killed
    settings:
      webhook:
        from_secret: slack_webhook
      icon_url:
        from_secret: slack_avatar

trigger:
  event:
    include:
      - tag
  ref:
    include:
      - refs/tags/**

View File

@@ -0,0 +1,195 @@
name: Build-Release-Image

on:
  push:
    tags:
      - '*'

env:
  CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login-dev
  TEA_VERSION: 0.9.2

jobs:

  Build-Image:
    runs-on: [ubuntu-docker-latest, "${{ matrix.platform }}"]
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Checkout
        uses: actions/checkout@v2
      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Prepare tags
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: ${{ env.CONTAINER_NAME }}
      #    tags: |
      #      type=pep440,pattern={{version}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: git.mrmeeb.stream
          username: ${{ env.GITHUB_ACTOR }}
          password: ${{ secrets.GTCR_TOKEN }}
      - name: Build and push by digest
        uses: docker/build-push-action@v5
        id: build
        with:
          context: ./app
          platforms: ${{ matrix.platform }}
          provenance: false
          outputs: type=image,name=${{ env.CONTAINER_NAME }},push-by-digest=true,name-canonical=true,push=true
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v3
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Merge-Images:
    runs-on: ubuntu-docker-latest
    needs: [Build-Image]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Get tag
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Download digests
        uses: actions/download-artifact@v3
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Prepare Docker metadata
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: ${{ env.CONTAINER_NAME }}
      - name: Login to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: git.mrmeeb.stream
          username: ${{ env.GITHUB_ACTOR }}
          password: ${{ secrets.GTCR_TOKEN }}
      - name: Create manifest latest
        working-directory: /tmp/digests
        run: |
          docker manifest create ${{ env.CONTAINER_NAME }}:latest \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          docker manifest inspect ${{ env.CONTAINER_NAME }}:latest
          docker manifest push ${{ env.CONTAINER_NAME }}:latest
      - name: Create manifest tagged
        working-directory: /tmp/digests
        run: |
          docker manifest create ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          docker manifest inspect ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}
          docker manifest push ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}
      # Disabled due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Create manifest list and push
      #  working-directory: /tmp/digests
      #  run: |
      #    echo $DOCKER_METADATA_OUTPUT_JSON
      #    echo $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
      #    docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
      #- name: Inspect image
      #  run: |
      #    docker buildx imagetools inspect ${{ env.CONTAINER_NAME }}:${{ steps.meta.outputs.version }}
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Create-Release:
    runs-on: [ubuntu-latest, linux/amd64]
    needs: [Merge-Images]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Get tag
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Prepare tea
        run: |
          # Download tea from Gitea release page
          echo "Downloading Tea v${{ env.TEA_VERSION }}" && \
          wget -q -O tea https://gitea.com/gitea/tea/releases/download/v${{ env.TEA_VERSION }}/tea-${{ env.TEA_VERSION }}-linux-amd64 && \
          echo "Downloaded Tea" && \
          chmod +x tea && \
          # Login to Gitea
          echo "Logging in to Gitea using Tea" && \
          ./tea login add --name SimpleLogin --url https://git.mrmeeb.stream --token ${{ secrets.GITHUB_TOKEN }} && \
          echo "Done"
      - name: Make release
        run: |
          echo "Creating release" && \
          ./tea release create --login "SimpleLogin" --repo ${{ env.GITHUB_REPOSITORY }} --tag ${{ env.RELEASE_VERSION }} -t ${{ env.RELEASE_VERSION }} -n "Triggered by release of v${{ env.RELEASE_VERSION }} by the SimpleLogin team. <a href=\"https://github.com/simple-login/app/releases/tag/v${{ env.RELEASE_VERSION }}\" target=\"_blank\">View the changelog</a>" && \
          echo "Done"
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Notify:
    runs-on: ubuntu-latest
    needs: [Build-Image, Merge-Images, Create-Release]
    steps:
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: always()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

View File

@@ -168,6 +168,8 @@ class NewUserStrategy(ClientMergeStrategy):

 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
+        # If it was scheduled to be deleted. Unschedule it.
+        self.user.delete_on = None
         partner_user = ensure_partner_user_exists_for_user(
             self.link_request, self.user, self.partner
         )
@@ -246,6 +248,8 @@ def link_user(
 ) -> LinkResult:
     # Sanitize email just in case
     link_request.email = sanitize_email(link_request.email)
+    # If it was scheduled to be deleted. Unschedule it.
+    current_user.delete_on = None
     partner_user = ensure_partner_user_exists_for_user(
         link_request, current_user, partner
     )

View File

@@ -33,6 +33,9 @@ def authorize_request() -> Optional[Tuple[str, int]]:
     if g.user.disabled:
         return jsonify(error="Disabled account"), 403

+    if not g.user.is_active():
+        return jsonify(error="Account does not exist"), 401
+
     g.api_key = api_key

     return None

View File

@@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
     q = q.order_by(Alias.pinned.desc())
     q = q.order_by(latest_activity.desc())

-    q = list(q.limit(page_limit).offset(page_id * page_size))
+    q = q.limit(page_limit).offset(page_id * page_size)

     ret = []
-    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
+    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
         ret.append(
             AliasInfo(
                 alias=alias,
@@ -358,7 +358,6 @@ def construct_alias_query(user: User):
                     else_=0,
                 )
             ).label("nb_forward"),
-            func.max(EmailLog.created_at).label("latest_email_log_created_at"),
         )
         .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
         .filter(Alias.user_id == user.id)
@@ -366,14 +365,6 @@ def construct_alias_query(user: User):
         .subquery()
     )

-    alias_contact_subquery = (
-        Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .filter(Alias.user_id == user.id)
-        .group_by(Alias.id)
-        .subquery()
-    )
-
     return (
         Session.query(
             Alias,
@@ -385,23 +376,7 @@ def construct_alias_query(user: User):
         )
         .options(joinedload(Alias.hibp_breaches))
         .options(joinedload(Alias.custom_domain))
-        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
-        .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
+        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
+        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
         .filter(Alias.id == alias_activity_subquery.c.id)
-        .filter(Alias.id == alias_contact_subquery.c.id)
-        .filter(
-            or_(
-                EmailLog.created_at
-                == alias_activity_subquery.c.latest_email_log_created_at,
-                and_(
-                    # no email log yet for this alias
-                    alias_activity_subquery.c.latest_email_log_created_at.is_(None),
-                    # to make sure only 1 contact is returned in this case
-                    or_(
-                        Contact.id == alias_contact_subquery.c.max_contact_id,
-                        alias_contact_subquery.c.max_contact_id.is_(None),
-                    ),
-                ),
-            )
-        )
     )

View File

@@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
     refused_email = RefusedEmail.create(
         full_report_path=s3_report_path, user_id=alias.user_id, flush=True
     )
-    return EmailLog.create(
+    email_log = EmailLog.create(
         user_id=alias.user_id,
         mailbox_id=alias.mailbox_id,
         contact_id=contact.id,
@@ -142,6 +142,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
         blocked=True,
         commit=True,
     )
+    return email_log


 def apply_dmarc_policy_for_reply_phase(

View File

@@ -727,6 +727,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

         return True

+    def is_active(self) -> bool:
+        if self.delete_on is None:
+            return True
+        return self.delete_on < arrow.now()
+
     def in_trial(self):
         """return True if user does not have lifetime licence or an active subscription AND is in trial period"""
         if self.lifetime_or_active_subscription():
@@ -828,6 +833,9 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         Whether user can create a new alias. User can't create a new alias if
         - has more than 15 aliases in the free plan, *even in the free trial*
         """
+        if not self.is_active():
+            return False
+
         if self.disabled:
             return False

@@ -908,7 +916,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         return sub

     def verified_custom_domains(self) -> List["CustomDomain"]:
-        return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all()
+        return (
+            CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
+            .order_by(CustomDomain.domain.asc())
+            .all()
+        )

     def mailboxes(self) -> List["Mailbox"]:
         """list of mailbox that user own"""
@@ -1496,6 +1508,8 @@ class Alias(Base, ModelMixin):
         TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
     )

+    last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
+
     __table_args__ = (
         Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
         # index on note column using pg_trgm
@@ -2055,6 +2069,20 @@ class EmailLog(Base, ModelMixin):
     def get_dashboard_url(self):
         return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"

+    @classmethod
+    def create(cls, *args, **kwargs):
+        commit = kwargs.pop("commit", False)
+        email_log = super().create(*args, **kwargs)
+        Session.flush()
+        if "alias_id" in kwargs:
+            sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
+            Session.execute(
+                sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
+            )
+        if commit:
+            Session.commit()
+        return email_log
+
     def __repr__(self):
         return f"<EmailLog {self.id}>"

View File

@@ -140,7 +140,7 @@ def authorize():
             Scope=Scope,
         )
     else:  # POST - user allows or denies
-        if not current_user.is_authenticated or not current_user.is_active:
+        if not current_user.is_authenticated or not current_user.is_active():
             LOG.i(
                 "Attempt to validate a OAUth allow request by an unauthenticated user"
             )

View File

@@ -6,7 +6,7 @@ import redis.exceptions
 import werkzeug.exceptions
 from limits.storage import RedisStorage

-from app.log import log
+from app.log import LOG

 lock_redis: Optional[RedisStorage] = None

@@ -22,17 +22,19 @@ def check_bucket_limit(
     bucket_seconds: int = 3600,
 ):
     # Calculate current bucket time
-    bucket_id = int(datetime.utcnow().timestamp()) % bucket_seconds
+    int_time = int(datetime.utcnow().timestamp())
+    bucket_id = int_time - (int_time % bucket_seconds)
     bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
     if not lock_redis:
         return
     try:
         value = lock_redis.incr(bucket_lock_name, bucket_seconds)
         if value > max_hits:
+            LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
             newrelic.agent.record_custom_event(
                 "BucketRateLimit",
                 {"lock_name": lock_name, "bucket_seconds": bucket_seconds},
             )
             raise werkzeug.exceptions.TooManyRequests()
     except (redis.exceptions.RedisError, AttributeError):
-        log.e("Cannot connect to redis")
+        LOG.e("Cannot connect to redis")

View File

@@ -62,6 +62,8 @@ from app.proton.utils import get_proton_partner
 from app.utils import sanitize_email
 from server import create_light_app

+DELETE_GRACE_DAYS = 30
+

 def notify_trial_end():
     for user in User.filter(
@@ -1126,14 +1128,19 @@ def notify_hibp():
         Session.commit()


-def clear_users_scheduled_to_be_deleted():
+def clear_users_scheduled_to_be_deleted(dry_run=False):
     users = User.filter(
-        and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
+        and_(
+            User.delete_on.isnot(None),
+            User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
+        )
     ).all()
     for user in users:
         LOG.i(
             f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
         )
+        if dry_run:
+            continue
         User.delete(user.id)
         Session.commit()

@@ -1206,4 +1213,4 @@ if __name__ == "__main__":
         load_unsent_mails_from_fs_and_resend()
     elif args.job == "delete_scheduled_users":
         LOG.d("Deleting users scheduled to be deleted")
-        clear_users_scheduled_to_be_deleted()
+        clear_users_scheduled_to_be_deleted(dry_run=True)

View File

@@ -62,7 +62,7 @@ jobs:
     captureStderr: true

   - name: SimpleLogin delete users scheduled to be deleted
-    command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
+    command: python /code/cron.py -j delete_scheduled_users
     shell: /bin/bash
     schedule: "15 11 * * *"
     captureStderr: true

View File

@@ -636,6 +636,10 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
     user = alias.user

+    if not user.is_active():
+        LOG.w(f"User {user} has been soft deleted")
+        return False, status.E502
+
     if not user.can_send_or_receive():
         LOG.i(f"User {user} cannot receive emails")
         if should_ignore_bounce(envelope.mail_from):
@@ -1055,6 +1059,9 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
     if not contact:
         LOG.w(f"No contact with {reply_email} as reverse alias")
         return False, status.E502
+    if not contact.user.is_active():
+        LOG.w(f"User {contact.user} has been soft deleted")
+        return False, status.E502

     alias = contact.alias
     alias_address: str = contact.alias.email
@@ -1921,6 +1928,9 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
             contact,
             alias,
         )
+        if not email_log.user.is_active():
+            LOG.d(f"User {email_log.user} is not active")
+            return status.E510

     if email_log.is_reply:
         content_type = msg.get_content_type().lower()
@@ -1982,6 +1992,9 @@ def send_no_reply_response(mail_from: str, msg: Message):
     if not mailbox:
         LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
         return
+    if not mailbox.user.is_active():
+        LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
+        return

     send_email_at_most_times(
         mailbox.user,
         ALERT_TO_NOREPLY,

View File

@@ -7460,9 +7460,7 @@ villain
 vindicate
 vineyard
 vintage
-violate
 violation
-violator
 violet
 violin
 viper

View File

@@ -0,0 +1,29 @@
"""empty message

Revision ID: 818b0a956205
Revises: 4bc54632d9aa
Create Date: 2024-02-01 10:43:46.253184

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '818b0a956205'
down_revision = '4bc54632d9aa'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('alias', 'last_email_log_id')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,44 @@
#!/usr/bin/env python3
import argparse
import time

from sqlalchemy import func

from app.models import Alias
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Backfill alias last use"
)
parser.add_argument(
    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")

args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
    max_alias_id = Session.query(func.max(Alias.id)).scalar()

print(f"Checking alias {alias_id_start} to {max_alias_id}")

step = 1000
el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
    rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
    for row in rows:
        Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
        Session.commit()
        updated += 1
    elapsed = time.time() - start_time
    time_per_alias = elapsed / (updated + 1)
    last_batch_id = batch_start + step
    remaining = max_alias_id - last_batch_id
    time_remaining = remaining * time_per_alias
    hours_remaining = time_remaining / 3600.0
    print(
        f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
    )
print("")

View File

@@ -228,6 +228,8 @@ def load_user(alternative_id):
         sentry_sdk.set_user({"email": user.email, "id": user.id})
         if user.disabled:
             return None
+        if not user.is_active():
+            return None

     return user

View File

@@ -40,14 +40,16 @@ def test_get_notifications(flask_client):

 def test_mark_notification_as_read(flask_client):
     user, api_key = get_new_user_and_api_key()
-    Notification.create(id=1, user_id=user.id, message="Test message 1")
+    notif_id = Notification.create(
+        user_id=user.id, message="Test message 1", flush=True
+    ).id
     Session.commit()

     r = flask_client.post(
-        url_for("api.mark_as_read", notification_id=1),
+        url_for("api.mark_as_read", notification_id=notif_id),
         headers={"Authentication": api_key.code},
     )
     assert r.status_code == 200

-    notification = Notification.first()
+    notification = Notification.filter_by(id=notif_id).first()
     assert notification.read

View File

@@ -1,8 +1,8 @@
 from app.api.serializer import get_alias_infos_with_pagination_v3
 from app.config import PAGE_LIMIT
 from app.db import Session
-from app.models import Alias, Mailbox, Contact
-from tests.utils import create_new_user
+from app.models import Alias, Mailbox, Contact, EmailLog
+from tests.utils import create_new_user, random_email


 def test_get_alias_infos_with_pagination_v3(flask_client):
@@ -155,3 +155,46 @@ def test_get_alias_infos_pinned_alias(flask_client):
     # pinned alias isn't included in the search
     alias_infos = get_alias_infos_with_pagination_v3(user, query="no match")
     assert len(alias_infos) == 0
+
+
+def test_get_alias_infos_with_no_last_email_log(flask_client):
+    user = create_new_user()
+    alias_infos = get_alias_infos_with_pagination_v3(user)
+    assert len(alias_infos) == 1
+    row = alias_infos[0]
+    assert row.alias.id == user.newsletter_alias_id
+    assert row.latest_contact is None
+    assert row.latest_email_log is None
+
+
+def test_get_alias_infos_with_email_log_no_contact():
+    user = create_new_user()
+    contact = Contact.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        website_email="a@a.com",
+        reply_email=random_email(),
+        flush=True,
+    )
+    Contact.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        website_email="unused@a.com",
+        reply_email=random_email(),
+        flush=True,
+    )
+    EmailLog.create(
+        user_id=user.id,
+        alias_id=user.newsletter_alias_id,
+        contact_id=contact.id,
+        commit=True,
+    )
+    alias_infos = get_alias_infos_with_pagination_v3(user)
+    assert len(alias_infos) == 1
+    row = alias_infos[0]
+    assert row.alias.id == user.newsletter_alias_id
+    assert row.latest_contact is not None
+    assert row.latest_contact.id == contact.id
+    assert row.latest_email_log is not None
+    alias = Alias.get(id=user.newsletter_alias_id)
+    assert row.latest_email_log.id == alias.last_email_log_id

View File

@@ -39,15 +39,17 @@ def test_cleanup_tokens(flask_client):

 def test_cleanup_users():
     u_delete_none_id = create_new_user().id
-    u_delete_after = create_new_user()
-    u_delete_after_id = u_delete_after.id
-    u_delete_before = create_new_user()
-    u_delete_before_id = u_delete_before.id
+    u_delete_grace_has_expired = create_new_user()
+    u_delete_grace_has_expired_id = u_delete_grace_has_expired.id
+    u_delete_grace_has_not_expired = create_new_user()
+    u_delete_grace_has_not_expired_id = u_delete_grace_has_not_expired.id

     now = arrow.now()
-    u_delete_after.delete_on = now.shift(minutes=1)
-    u_delete_before.delete_on = now.shift(minutes=-1)
+    u_delete_grace_has_expired.delete_on = now.shift(days=-(cron.DELETE_GRACE_DAYS + 1))
+    u_delete_grace_has_not_expired.delete_on = now.shift(
+        days=-(cron.DELETE_GRACE_DAYS - 1)
+    )
     Session.flush()

     cron.clear_users_scheduled_to_be_deleted()

     assert User.get(u_delete_none_id) is not None
-    assert User.get(u_delete_after_id) is not None
-    assert User.get(u_delete_before_id) is None
+    assert User.get(u_delete_grace_has_not_expired_id) is not None
+    assert User.get(u_delete_grace_has_expired_id) is None

View File

@@ -57,6 +57,7 @@ from tests.utils import (
     login,
     load_eml_file,
     create_new_user,
+    random_email,
     random_domain,
     random_token,
 )
@@ -186,13 +187,14 @@ def test_parse_full_address():

 def test_send_email_with_rate_control(flask_client):
     user = create_new_user()
+    email = random_email()

     for _ in range(MAX_ALERT_24H):
         assert send_email_with_rate_control(
-            user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
+            user, "test alert type", email, "subject", "plaintext"
         )
     assert not send_email_with_rate_control(
-        user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
+        user, "test alert type", email, "subject", "plaintext"
     )