Compare commits: a505186051...4.38.2 (53 commits)
| SHA1 |
|---|
| 6c910d62c5 |
| 99ffd1ec0c |
| eda940f8b2 |
| 1dad582523 |
| e516266a27 |
| 850fc95477 |
| d172825900 |
| 026865e5bf |
| add94ef2a2 |
| 1081400948 |
| 5776128905 |
| d661860f4c |
| 0a52e32972 |
| 703dcbd0eb |
| ce7ed69547 |
| 4f5564df16 |
| 2fee569131 |
| 7ea45d6f5d |
| 6d24db50bd |
| 88f270c6a1 |
| 0962b1cf29 |
| 6051d72691 |
| c31a75a9ef |
| ef289385ff |
| 9b12a2ad33 |
| 8eb19d88f3 |
| e36e9d3077 |
| b2430cbc5b |
| 1258115397 |
| 38c134d903 |
| cd77e4cc2d |
| 87aedf3207 |
| 3523c9fc15 |
| a6f4995cb5 |
| 727f61a35e |
| ce5124605a |
| 2c82b03f8d |
| 1b7a6223ac |
| 75331c62a4 |
| 3f68a3e640 |
| 8ee4f9462e |
| 822855d584 |
| 1a6a7e079b |
| 5210cb6515 |
| b643f0644b |
| 5d093db4f6 |
| 0b16fcac67 |
| a0d294da53 |
| c3f755aede |
| 0aea62c222 |
| 92f4ad2237 |
| 20da343c54 |
| 02776e8478 |
.drone.yml (new file, 52 lines)
@@ -0,0 +1,52 @@
+kind: pipeline
+type: docker
+name: build-multiarch-images
+
+platform:
+  os: linux
+  arch: amd64
+
+steps:
+  - name: make-tags
+    image: node
+    commands:
+      - echo -n "${DRONE_TAG}, latest" > .tags
+
+  - name: build
+    image: thegeeklab/drone-docker-buildx
+    privileged: true
+    settings:
+      provenance: false
+      dockerfile: app/Dockerfile
+      context: app
+      registry: git.mrmeeb.stream
+      username:
+        from_secret: docker_username
+      password:
+        from_secret: docker_password
+      repo: git.mrmeeb.stream/mrmeeb/simple-login
+      platforms:
+        - linux/arm64
+        - linux/amd64
+
+  - name: notify
+    image: plugins/slack
+    when:
+      status:
+        - success
+        - failure
+        - killed
+    settings:
+      webhook:
+        from_secret: slack_webhook
+      icon_url:
+        from_secret: slack_avatar
+
+trigger:
+  event:
+    include:
+      - tag
+  ref:
+    include:
+      - refs/tags/**
@@ -1,195 +0,0 @@
-name: Build-Release-Image
-on:
-  push:
-    tags:
-      - '*'
-
-env:
-  CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login-dev
-  TEA_VERSION: 0.9.2
-
-jobs:
-
-  Build-Image:
-    runs-on: [ubuntu-docker-latest, "${{ matrix.platform }}"]
-    strategy:
-      fail-fast: false
-      matrix:
-        platform:
-          - linux/amd64
-          - linux/arm64
-    steps:
-      - name: Prepare
-        run: |
-          platform=${{ matrix.platform }}
-          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
-          echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
-      - name: Checkout
-        uses: actions/checkout@v2
-      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
-      #- name: Prepare tags
-      #  id: meta
-      #  uses: docker/metadata-action@v5
-      #  with:
-      #    images: ${{ env.CONTAINER_NAME }}
-      #    tags: |
-      #      type=pep440,pattern={{version}}
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Login to Gitea Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: git.mrmeeb.stream
-          username: ${{ env.GITHUB_ACTOR }}
-          password: ${{ secrets.GTCR_TOKEN }}
-      - name: Build and push by digest
-        uses: docker/build-push-action@v5
-        id: build
-        with:
-          context: ./app
-          platforms: ${{ matrix.platform }}
-          provenance: false
-          outputs: type=image,name=${{ env.CONTAINER_NAME }},push-by-digest=true,name-canonical=true,push=true
-      - name: Export digest
-        run: |
-          mkdir -p /tmp/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "/tmp/digests/${digest#sha256:}"
-      - name: Upload digest
-        uses: actions/upload-artifact@v3
-        with:
-          name: digests-${{ env.PLATFORM_PAIR }}
-          path: /tmp/digests/*
-          if-no-files-found: error
-          retention-days: 1
-      - name: Notify
-        uses: rjstone/discord-webhook-notify@v1
-        if: failure()
-        with:
-          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
-          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
-          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
-          username: Gitea
-          avatarUrl: ${{ vars.RUNNER_ICON_URL }}
-
-  Merge-Images:
-    runs-on: ubuntu-docker-latest
-    needs: [Build-Image]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Get tag
-        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
-      - name: Download digests
-        uses: actions/download-artifact@v3
-        with:
-          path: /tmp/digests
-          pattern: digests-*
-          merge-multiple: true
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
-      #- name: Prepare Docker metadata
-      #  id: meta
-      #  uses: docker/metadata-action@v5
-      #  with:
-      #    images: ${{ env.CONTAINER_NAME }}
-      - name: Login to Gitea Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: git.mrmeeb.stream
-          username: ${{ env.GITHUB_ACTOR }}
-          password: ${{ secrets.GTCR_TOKEN }}
-      - name: Create manifest latest
-        working-directory: /tmp/digests
-        run: |
-          docker manifest create ${{ env.CONTAINER_NAME }}:latest \
-            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
-            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
-          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
-          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
-          docker manifest inspect ${{ env.CONTAINER_NAME }}:latest
-
-          docker manifest push ${{ env.CONTAINER_NAME }}:latest
-      - name: Create manifest tagged
-        working-directory: /tmp/digests
-        run: |
-          docker manifest create ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} \
-            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
-            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
-          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
-          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
-          docker manifest inspect ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}
-
-          docker manifest push ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}
-      # Disabled due to https://github.com/go-gitea/gitea/issues/29563
-      #- name: Create manifest list and push
-      #  working-directory: /tmp/digests
-      #  run: |
-      #    echo $DOCKER_METADATA_OUTPUT_JSON
-      #    echo $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
-      #    docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
-      #- name: Inspect image
-      #  run: |
-      #    docker buildx imagetools inspect ${{ env.CONTAINER_NAME }}:${{ steps.meta.outputs.version }}
-      - name: Notify
-        uses: rjstone/discord-webhook-notify@v1
-        if: failure()
-        with:
-          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
-          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
-          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
-          username: Gitea
-          avatarUrl: ${{ vars.RUNNER_ICON_URL }}
-
-  Create-Release:
-    runs-on: [ubuntu-latest, linux/amd64]
-    needs: [Merge-Images]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Get tag
-        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
-      - name: Prepare tea
-        run: |
-          # Download tea from Gitea release page
-          echo "Downloading Tea v${{ env.TEA_VERSION }}" && \
-          wget -q -O tea https://gitea.com/gitea/tea/releases/download/v${{ env.TEA_VERSION }}/tea-${{ env.TEA_VERSION }}-linux-amd64 && \
-          echo "Downloaded Tea" && \
-          chmod +x tea && \
-          # Login to Gitea
-          echo "Logging in to Gitea using Tea" && \
-          ./tea login add --name SimpleLogin --url https://git.mrmeeb.stream --token ${{ secrets.GITHUB_TOKEN }} && \
-          echo "Done"
-      - name: Make release
-        run: |
-          echo "Creating release" && \
-          ./tea release create --login "SimpleLogin" --repo ${{ env.GITHUB_REPOSITORY }} --tag ${{ env.RELEASE_VERSION }} -t ${{ env.RELEASE_VERSION }} -n "Triggered by release of v${{ env.RELEASE_VERSION }} by the SimpleLogin team. <a href=\"https://github.com/simple-login/app/releases/tag/v${{ env.RELEASE_VERSION }}\" target=\"_blank\">View the changelog</a>" && \
-          echo "Done"
-      - name: Notify
-        uses: rjstone/discord-webhook-notify@v1
-        if: failure()
-        with:
-          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
-          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
-          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
-          username: Gitea
-          avatarUrl: ${{ vars.RUNNER_ICON_URL }}
-
-  Notify:
-    runs-on: ubuntu-latest
-    needs: [Build-Image, Merge-Images, Create-Release]
-    steps:
-      - name: Notify
-        uses: rjstone/discord-webhook-notify@v1
-        if: always()
-        with:
-          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
-          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
-          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
-          username: Gitea
-          avatarUrl: ${{ vars.RUNNER_ICON_URL }}
@@ -168,8 +168,6 @@ class NewUserStrategy(ClientMergeStrategy):

 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-        # IF it was scheduled to be deleted. Unschedule it.
-        self.user.delete_on = None
         partner_user = ensure_partner_user_exists_for_user(
             self.link_request, self.user, self.partner
         )
@@ -248,8 +246,6 @@ def link_user(
 ) -> LinkResult:
     # Sanitize email just in case
     link_request.email = sanitize_email(link_request.email)
-    # If it was scheduled to be deleted. Unschedule it.
-    current_user.delete_on = None
     partner_user = ensure_partner_user_exists_for_user(
         link_request, current_user, partner
     )
@@ -33,9 +33,6 @@ def authorize_request() -> Optional[Tuple[str, int]]:
     if g.user.disabled:
         return jsonify(error="Disabled account"), 403

-    if not g.user.is_active():
-        return jsonify(error="Account does not exist"), 401
-
     g.api_key = api_key
     return None

@@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
     q = q.order_by(Alias.pinned.desc())
     q = q.order_by(latest_activity.desc())

-    q = q.limit(page_limit).offset(page_id * page_size)
+    q = list(q.limit(page_limit).offset(page_id * page_size))

     ret = []
-    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
+    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
         ret.append(
             AliasInfo(
                 alias=alias,
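The added (`+`) line materializes the paginated query exactly once, so the later loop iterates over already-fetched rows instead of re-executing the SELECT. A minimal, self-contained sketch of the same pattern, using SQLite and a hypothetical `AliasRow` stand-in rather than the app's real `Alias` model:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class AliasRow(Base):
    # hypothetical stand-in for the app's Alias model
    __tablename__ = "alias"
    id = Column(Integer, primary_key=True)
    email = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([AliasRow(email=f"a{i}@example.com") for i in range(50)])
    session.commit()

    page_limit, page_size, page_id = 20, 20, 0
    q = session.query(AliasRow).order_by(AliasRow.id)
    # list(...) runs the SELECT once; iterating the bare Query object in
    # several places would re-execute it against the database each time.
    page = list(q.limit(page_limit).offset(page_id * page_size))
    assert len(page) == 20
```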
@@ -358,6 +358,7 @@ def construct_alias_query(user: User):
                     else_=0,
                 )
             ).label("nb_forward"),
+            func.max(EmailLog.created_at).label("latest_email_log_created_at"),
         )
         .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
         .filter(Alias.user_id == user.id)
@@ -365,6 +366,14 @@ def construct_alias_query(user: User):
         .subquery()
     )

+    alias_contact_subquery = (
+        Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
+        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
+        .filter(Alias.user_id == user.id)
+        .group_by(Alias.id)
+        .subquery()
+    )
+
     return (
         Session.query(
             Alias,
@@ -376,7 +385,23 @@ def construct_alias_query(user: User):
         )
         .options(joinedload(Alias.hibp_breaches))
         .options(joinedload(Alias.custom_domain))
-        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
-        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
+        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
+        .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
         .filter(Alias.id == alias_activity_subquery.c.id)
+        .filter(Alias.id == alias_contact_subquery.c.id)
+        .filter(
+            or_(
+                EmailLog.created_at
+                == alias_activity_subquery.c.latest_email_log_created_at,
+                and_(
+                    # no email log yet for this alias
+                    alias_activity_subquery.c.latest_email_log_created_at.is_(None),
+                    # to make sure only 1 contact is returned in this case
+                    or_(
+                        Contact.id == alias_contact_subquery.c.max_contact_id,
+                        alias_contact_subquery.c.max_contact_id.is_(None),
+                    ),
+                ),
+            )
+        )
     )
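The added subqueries implement a classic greatest-row-per-group lookup: aggregate the newest `EmailLog` timestamp per alias, then join back on equality. A hedged sketch of the shape, reusing the model and session names from the diff (assumes the app context; not a verbatim excerpt):

```python
from sqlalchemy import func

from app.db import Session
from app.models import Alias, Contact, EmailLog

# 1) per-alias aggregate: timestamp of the newest email log (NULL if none)
alias_activity_subquery = (
    Session.query(
        Alias.id,
        func.max(EmailLog.created_at).label("latest_email_log_created_at"),
    )
    .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
    .group_by(Alias.id)
    .subquery()
)

# 2) join back so only the newest EmailLog row per alias survives; the
#    or_/and_ block in the diff additionally falls back to max(Contact.id)
#    when an alias has no email log, so exactly one row remains
rows = (
    Session.query(Alias, EmailLog)
    .join(Contact, Alias.id == Contact.alias_id, isouter=True)
    .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
    .filter(Alias.id == alias_activity_subquery.c.id)
    .filter(
        (EmailLog.created_at == alias_activity_subquery.c.latest_email_log_created_at)
        | alias_activity_subquery.c.latest_email_log_created_at.is_(None)
    )
    .all()
)
```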
@@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
     refused_email = RefusedEmail.create(
         full_report_path=s3_report_path, user_id=alias.user_id, flush=True
     )
-    email_log = EmailLog.create(
+    return EmailLog.create(
         user_id=alias.user_id,
         mailbox_id=alias.mailbox_id,
         contact_id=contact.id,
@@ -142,7 +142,6 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
         blocked=True,
         commit=True,
     )
-    return email_log


 def apply_dmarc_policy_for_reply_phase(
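The `create(..., flush=True)` / `create(..., commit=True)` arguments seen here come from SimpleLogin's model helper. A hedged sketch of that convention, with a hypothetical `CreateMixin` rather than the app's actual `ModelMixin` implementation:

```python
from app.db import Session  # assumed import, as in the diff


class CreateMixin:
    """Hypothetical sketch of the create(flush=..., commit=...) convention."""

    @classmethod
    def create(cls, **kwargs):
        commit = kwargs.pop("commit", False)
        flush = kwargs.pop("flush", False)
        obj = cls(**kwargs)
        Session.add(obj)
        if flush:
            # assigns primary keys (e.g. refused_email.id) without ending the tx
            Session.flush()
        if commit:
            # persists the whole unit of work, as EmailLog.create(commit=True) does
            Session.commit()
        return obj
```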
@@ -727,11 +727,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

         return True

-    def is_active(self) -> bool:
-        if self.delete_on is None:
-            return True
-        return self.delete_on < arrow.now()
-
     def in_trial(self):
         """return True if user does not have lifetime licence or an active subscription AND is in trial period"""
         if self.lifetime_or_active_subscription():
@@ -833,9 +828,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         Whether user can create a new alias. User can't create a new alias if
         - has more than 15 aliases in the free plan, *even in the free trial*
         """
-        if not self.is_active():
-            return False
-
         if self.disabled:
             return False

@@ -916,11 +908,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         return sub

     def verified_custom_domains(self) -> List["CustomDomain"]:
-        return (
-            CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
-            .order_by(CustomDomain.domain.asc())
-            .all()
-        )
+        return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all()

     def mailboxes(self) -> List["Mailbox"]:
         """list of mailbox that user own"""
@@ -1508,8 +1496,6 @@ class Alias(Base, ModelMixin):
         TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
     )

-    last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
-
     __table_args__ = (
         Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
         # index on note column using pg_trgm
@@ -2069,20 +2055,6 @@ class EmailLog(Base, ModelMixin):
     def get_dashboard_url(self):
         return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"

-    @classmethod
-    def create(cls, *args, **kwargs):
-        commit = kwargs.pop("commit", False)
-        email_log = super().create(*args, **kwargs)
-        Session.flush()
-        if "alias_id" in kwargs:
-            sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
-            Session.execute(
-                sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
-            )
-        if commit:
-            Session.commit()
-        return email_log
-
     def __repr__(self):
         return f"<EmailLog {self.id}>"

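The removed `EmailLog.create` override kept `Alias.last_email_log_id` in sync on every insert, which is what made the simpler read path possible. A short sketch of how that denormalized pointer was consumed, assuming the column and app context from the left-hand side of the diff:

```python
from app.db import Session
from app.models import Alias, EmailLog

# With Alias.last_email_log_id maintained at write time, "latest activity"
# becomes a plain join instead of a max(created_at) subquery per alias.
rows = (
    Session.query(Alias, EmailLog)
    .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
    .all()
)
```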
@@ -140,7 +140,7 @@ def authorize():
             Scope=Scope,
         )
     else:  # POST - user allows or denies
-        if not current_user.is_authenticated or not current_user.is_active():
+        if not current_user.is_authenticated or not current_user.is_active:
            LOG.i(
                "Attempt to validate a OAUth allow request by an unauthenticated user"
            )
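The dropped parentheses matter: flask-login's `UserMixin` exposes `is_active` as a property, so once the custom `is_active()` method is removed from `User`, the call form would raise. A minimal demonstration:

```python
from flask_login import UserMixin


class PlainUser(UserMixin):
    pass


u = PlainUser()
print(u.is_active)  # True - property access, no call
# u.is_active()     # would raise TypeError: 'bool' object is not callable
```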
@@ -6,7 +6,7 @@ import redis.exceptions
 import werkzeug.exceptions
 from limits.storage import RedisStorage

-from app.log import LOG
+from app.log import log

 lock_redis: Optional[RedisStorage] = None

@@ -22,19 +22,17 @@ def check_bucket_limit(
     bucket_seconds: int = 3600,
 ):
-    # Calculate current bucket time
-    int_time = int(datetime.utcnow().timestamp())
-    bucket_id = int_time - (int_time % bucket_seconds)
+    bucket_id = int(datetime.utcnow().timestamp()) % bucket_seconds
     bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
     if not lock_redis:
         return
     try:
         value = lock_redis.incr(bucket_lock_name, bucket_seconds)
         if value > max_hits:
             LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
             newrelic.agent.record_custom_event(
                 "BucketRateLimit",
                 {"lock_name": lock_name, "bucket_seconds": bucket_seconds},
             )
             raise werkzeug.exceptions.TooManyRequests()
     except (redis.exceptions.RedisError, AttributeError):
-        LOG.e("Cannot connect to redis")
+        log.e("Cannot connect to redis")
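The two `bucket_id` formulas are not interchangeable: `int_time - (int_time % bucket_seconds)` floors the timestamp to the start of the current window, while `int_time % bucket_seconds` is the offset inside the window and changes every second. A quick sketch of the difference:

```python
from datetime import datetime

bucket_seconds = 3600
int_time = int(datetime.utcnow().timestamp())

floor_id = int_time - (int_time % bucket_seconds)  # constant for the whole hour
offset_id = int_time % bucket_seconds              # 0..3599, changes every second

# floor_id yields one counter key per hour (bl:<name>:<floor_id>), so incr()
# accumulates hits within the window; offset_id would scatter the same hits
# across up to 3600 different keys.
print(floor_id, offset_id)
```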
app/cron.py (13 changes)
@@ -62,8 +62,6 @@ from app.proton.utils import get_proton_partner
 from app.utils import sanitize_email
 from server import create_light_app

-DELETE_GRACE_DAYS = 30
-

 def notify_trial_end():
     for user in User.filter(
@@ -1128,19 +1126,14 @@ def notify_hibp():
     Session.commit()


-def clear_users_scheduled_to_be_deleted(dry_run=False):
+def clear_users_scheduled_to_be_deleted():
     users = User.filter(
-        and_(
-            User.delete_on.isnot(None),
-            User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
-        )
+        and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
     ).all()
     for user in users:
         LOG.i(
             f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
         )
-        if dry_run:
-            continue
         User.delete(user.id)
         Session.commit()

@@ -1213,4 +1206,4 @@ if __name__ == "__main__":
         load_unsent_mails_from_fs_and_resend()
     elif args.job == "delete_scheduled_users":
         LOG.d("Deleting users scheduled to be deleted")
-        clear_users_scheduled_to_be_deleted(dry_run=True)
+        clear_users_scheduled_to_be_deleted()
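The two filters differ by a 30-day grace window: the left-hand query only selects users whose `delete_on` is at least `DELETE_GRACE_DAYS` in the past, the right-hand one as soon as the date has passed. A small sketch of the two comparisons:

```python
import arrow

DELETE_GRACE_DAYS = 30  # value of the removed constant
delete_on = arrow.now().shift(days=-10)  # deletion scheduled 10 days ago

with_grace = delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS)
immediate = delete_on < arrow.now()

print(with_grace)  # False: only 10 of the 30 grace days have elapsed
print(immediate)   # True: the delete_on date itself has passed
```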
@@ -62,7 +62,7 @@ jobs:
     captureStderr: true

   - name: SimpleLogin delete users scheduled to be deleted
-    command: python /code/cron.py -j delete_scheduled_users
+    command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
     shell: /bin/bash
     schedule: "15 11 * * *"
     captureStderr: true
@@ -636,10 +636,6 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str

     user = alias.user

-    if not user.is_active():
-        LOG.w(f"User {user} has been soft deleted")
-        return False, status.E502
-
     if not user.can_send_or_receive():
         LOG.i(f"User {user} cannot receive emails")
         if should_ignore_bounce(envelope.mail_from):
@@ -1059,9 +1055,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
     if not contact:
         LOG.w(f"No contact with {reply_email} as reverse alias")
         return False, status.E502
-    if not contact.user.is_active():
-        LOG.w(f"User {contact.user} has been soft deleted")
-        return False, status.E502

     alias = contact.alias
     alias_address: str = contact.alias.email
@@ -1928,9 +1921,6 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
         contact,
         alias,
     )
-    if not email_log.user.is_active():
-        LOG.d(f"User {email_log.user} is not active")
-        return status.E510

     if email_log.is_reply:
         content_type = msg.get_content_type().lower()
@@ -1992,9 +1982,6 @@ def send_no_reply_response(mail_from: str, msg: Message):
     if not mailbox:
         LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
         return
-    if not mailbox.user.is_active():
-        LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
-        return
     send_email_at_most_times(
         mailbox.user,
         ALERT_TO_NOREPLY,
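The four hunks above drop the same soft-delete guard from four entry points (forward, reply, bounce, no-reply). One way to express that repeated check as a single helper; a hypothetical sketch, not code from the repository:

```python
from typing import Optional, Tuple


def soft_delete_rejection(user, log_fn, error_status) -> Optional[Tuple[bool, str]]:
    """Return the (success, status) pair the handlers use when `user` is soft-deleted."""
    if not user.is_active():
        log_fn(f"User {user} has been soft deleted")
        return False, error_status
    return None


# usage inside a handle_forward-like function:
#   rejection = soft_delete_rejection(user, LOG.w, status.E502)
#   if rejection:
#       return rejection
```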
@@ -7460,7 +7460,9 @@ villain
 vindicate
 vineyard
 vintage
+violate
+violation
 violator
 violet
 violin
 viper
@@ -1,29 +0,0 @@
-"""empty message
-
-Revision ID: 818b0a956205
-Revises: 4bc54632d9aa
-Create Date: 2024-02-01 10:43:46.253184
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '818b0a956205'
-down_revision = '4bc54632d9aa'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('alias', 'last_email_log_id')
-    # ### end Alembic commands ###
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import time
-
-from sqlalchemy import func
-from app.models import Alias
-from app.db import Session
-
-parser = argparse.ArgumentParser(
-    prog="Backfill alias", description="Backfill alias las use"
-)
-parser.add_argument(
-    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
-)
-parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
-
-args = parser.parse_args()
-alias_id_start = args.start_alias_id
-max_alias_id = args.end_alias_id
-if max_alias_id == 0:
-    max_alias_id = Session.query(func.max(Alias.id)).scalar()
-
-print(f"Checking alias {alias_id_start} to {max_alias_id}")
-step = 1000
-el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
-alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
-updated = 0
-start_time = time.time()
-for batch_start in range(alias_id_start, max_alias_id, step):
-    rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
-    for row in rows:
-        Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
-        Session.commit()
-        updated += 1
-    elapsed = time.time() - start_time
-    time_per_alias = elapsed / (updated + 1)
-    last_batch_id = batch_start + step
-    remaining = max_alias_id - last_batch_id
-    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
-    hours_remaining = time_remaining / 3600.0
-    print(
-        f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
-    )
-print("")
@@ -228,8 +228,6 @@ def load_user(alternative_id):
     sentry_sdk.set_user({"email": user.email, "id": user.id})
     if user.disabled:
         return None
-    if not user.is_active():
-        return None

     return user

@@ -40,16 +40,14 @@ def test_get_notifications(flask_client):
 def test_mark_notification_as_read(flask_client):
     user, api_key = get_new_user_and_api_key()

-    notif_id = Notification.create(
-        user_id=user.id, message="Test message 1", flush=True
-    ).id
+    Notification.create(id=1, user_id=user.id, message="Test message 1")
     Session.commit()

     r = flask_client.post(
-        url_for("api.mark_as_read", notification_id=notif_id),
+        url_for("api.mark_as_read", notification_id=1),
         headers={"Authentication": api_key.code},
     )

     assert r.status_code == 200
-    notification = Notification.filter_by(id=notif_id).first()
+    notification = Notification.first()
     assert notification.read
@@ -1,8 +1,8 @@
 from app.api.serializer import get_alias_infos_with_pagination_v3
 from app.config import PAGE_LIMIT
 from app.db import Session
-from app.models import Alias, Mailbox, Contact, EmailLog
-from tests.utils import create_new_user, random_email
+from app.models import Alias, Mailbox, Contact
+from tests.utils import create_new_user


 def test_get_alias_infos_with_pagination_v3(flask_client):
@@ -155,46 +155,3 @@ def test_get_alias_infos_pinned_alias(flask_client):
     # pinned alias isn't included in the search
     alias_infos = get_alias_infos_with_pagination_v3(user, query="no match")
     assert len(alias_infos) == 0
-
-
-def test_get_alias_infos_with_no_last_email_log(flask_client):
-    user = create_new_user()
-    alias_infos = get_alias_infos_with_pagination_v3(user)
-    assert len(alias_infos) == 1
-    row = alias_infos[0]
-    assert row.alias.id == user.newsletter_alias_id
-    assert row.latest_contact is None
-    assert row.latest_email_log is None
-
-
-def test_get_alias_infos_with_email_log_no_contact():
-    user = create_new_user()
-    contact = Contact.create(
-        user_id=user.id,
-        alias_id=user.newsletter_alias_id,
-        website_email="a@a.com",
-        reply_email=random_email(),
-        flush=True,
-    )
-    Contact.create(
-        user_id=user.id,
-        alias_id=user.newsletter_alias_id,
-        website_email="unused@a.com",
-        reply_email=random_email(),
-        flush=True,
-    )
-    EmailLog.create(
-        user_id=user.id,
-        alias_id=user.newsletter_alias_id,
-        contact_id=contact.id,
-        commit=True,
-    )
-    alias_infos = get_alias_infos_with_pagination_v3(user)
-    assert len(alias_infos) == 1
-    row = alias_infos[0]
-    assert row.alias.id == user.newsletter_alias_id
-    assert row.latest_contact is not None
-    assert row.latest_contact.id == contact.id
-    assert row.latest_email_log is not None
-    alias = Alias.get(id=user.newsletter_alias_id)
-    assert row.latest_email_log.id == alias.last_email_log_id
@@ -39,17 +39,15 @@ def test_cleanup_tokens(flask_client):

 def test_cleanup_users():
     u_delete_none_id = create_new_user().id
-    u_delete_grace_has_expired = create_new_user()
-    u_delete_grace_has_expired_id = u_delete_grace_has_expired.id
-    u_delete_grace_has_not_expired = create_new_user()
-    u_delete_grace_has_not_expired_id = u_delete_grace_has_not_expired.id
+    u_delete_after = create_new_user()
+    u_delete_after_id = u_delete_after.id
+    u_delete_before = create_new_user()
+    u_delete_before_id = u_delete_before.id
     now = arrow.now()
-    u_delete_grace_has_expired.delete_on = now.shift(days=-(cron.DELETE_GRACE_DAYS + 1))
-    u_delete_grace_has_not_expired.delete_on = now.shift(
-        days=-(cron.DELETE_GRACE_DAYS - 1)
-    )
+    u_delete_after.delete_on = now.shift(minutes=1)
+    u_delete_before.delete_on = now.shift(minutes=-1)
     Session.flush()
     cron.clear_users_scheduled_to_be_deleted()
     assert User.get(u_delete_none_id) is not None
-    assert User.get(u_delete_grace_has_not_expired_id) is not None
-    assert User.get(u_delete_grace_has_expired_id) is None
+    assert User.get(u_delete_after_id) is not None
+    assert User.get(u_delete_before_id) is None
@@ -57,7 +57,6 @@ from tests.utils import (
     login,
     load_eml_file,
     create_new_user,
-    random_email,
     random_domain,
     random_token,
 )
@@ -187,14 +186,13 @@ def test_parse_full_address():

 def test_send_email_with_rate_control(flask_client):
     user = create_new_user()
-    email = random_email()

     for _ in range(MAX_ALERT_24H):
         assert send_email_with_rate_control(
-            user, "test alert type", email, "subject", "plaintext"
+            user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
         )
     assert not send_email_with_rate_control(
-        user, "test alert type", email, "subject", "plaintext"
+        user, "test alert type", "abcd@gmail.com", "subject", "plaintext"
     )
