Compare commits
54 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 050cef0e4e | |
| | 0d557ef875 | |
| | 6e56ea4489 | |
| | def0de643b | |
| | 9e7cb2c7dd | |
| | f1110506c0 | |
| | f5bce7d7ff | |
| | 75f45d9365 | |
| | ead425e0c2 | |
| | 6c910d62c5 | |
| | 99ffd1ec0c | |
| | eda940f8b2 | |
| | 1dad582523 | |
| | e516266a27 | |
| | 850fc95477 | |
| | d172825900 | |
| | 026865e5bf | |
| | add94ef2a2 | |
| | 1081400948 | |
| | 5776128905 | |
| | d661860f4c | |
| | 0a52e32972 | |
| | 703dcbd0eb | |
| | ce7ed69547 | |
| | 4f5564df16 | |
| | 2fee569131 | |
| | 7ea45d6f5d | |
| | 6d24db50bd | |
| | 88f270c6a1 | |
| | 0962b1cf29 | |
| | 6051d72691 | |
| | c31a75a9ef | |
| | ef289385ff | |
| | 9b12a2ad33 | |
| | 8eb19d88f3 | |
| | e36e9d3077 | |
| | b2430cbc5b | |
| | 1258115397 | |
| | 38c134d903 | |
| | cd77e4cc2d | |
| | 87aedf3207 | |
| | 3523c9fc15 | |
| | a6f4995cb5 | |
| | 727f61a35e | |
| | ce5124605a | |
| | 2c82b03f8d | |
| | 1b7a6223ac | |
| | 75331c62a4 | |
| | 3f68a3e640 | |
| | 8ee4f9462e | |
| | 822855d584 | |
| | 1a6a7e079b | |
| | 5210cb6515 | |
| | b643f0644b | |
.drone.yml (deleted, 46 lines)
@@ -1,46 +0,0 @@
kind: pipeline
type: docker
name: build-multiarch-images

platform:
  os: linux
  arch: amd64

steps:
  - name: make-tags
    image: node
    commands:
      - echo -n "${DRONE_TAG}, latest" > .tags

  - name: build
    image: thegeeklab/drone-docker-buildx
    privileged: true
    settings:
      dockerfile: app/Dockerfile
      context: app
      registry: git.mrmeeb.stream
      username:
        from_secret: docker_username
      password:
        from_secret: docker_password
      repo: git.mrmeeb.stream/mrmeeb/simple-login
      platforms:
        - linux/arm64
        - linux/amd64

  - name: notify
    image: plugins/slack
    settings:
      webhook:
        from_secret: slack_webhook
      icon_url:
        from_secret: slack_avatar

trigger:
  event:
    include:
      - tag
  ref:
    include:
      - refs/tags/**
.gitea/workflows/build-release-image.yaml (new file, 195 lines)
@@ -0,0 +1,195 @@
name: Build-Release-Image
on:
  push:
    tags:
      - '*'

env:
  CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login-dev
  TEA_VERSION: 0.9.2

jobs:

  Build-Image:
    runs-on: [ubuntu-docker-latest, "${{ matrix.platform }}"]
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Checkout
        uses: actions/checkout@v2
      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Prepare tags
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: ${{ env.CONTAINER_NAME }}
      #    tags: |
      #      type=pep440,pattern={{version}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: git.mrmeeb.stream
          username: ${{ env.GITHUB_ACTOR }}
          password: ${{ secrets.GTCR_TOKEN }}
      - name: Build and push by digest
        uses: docker/build-push-action@v5
        id: build
        with:
          context: ./app
          platforms: ${{ matrix.platform }}
          provenance: false
          outputs: type=image,name=${{ env.CONTAINER_NAME }},push-by-digest=true,name-canonical=true,push=true
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v3
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Merge-Images:
    runs-on: ubuntu-docker-latest
    needs: [Build-Image]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Get tag
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Download digests
        uses: actions/download-artifact@v3
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Prepare Docker metadata
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: ${{ env.CONTAINER_NAME }}
      - name: Login to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: git.mrmeeb.stream
          username: ${{ env.GITHUB_ACTOR }}
          password: ${{ secrets.GTCR_TOKEN }}
      - name: Create manifest latest
        working-directory: /tmp/digests
        run: |
          docker manifest create ${{ env.CONTAINER_NAME }}:latest \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          docker manifest inspect ${{ env.CONTAINER_NAME }}:latest

          docker manifest push ${{ env.CONTAINER_NAME }}:latest
      - name: Create manifest tagged
        working-directory: /tmp/digests
        run: |
          docker manifest create ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          docker manifest inspect ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}

          docker manifest push ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}
      # Disabled due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Create manifest list and push
      #  working-directory: /tmp/digests
      #  run: |
      #    echo $DOCKER_METADATA_OUTPUT_JSON
      #    echo $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
      #    docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
      #- name: Inspect image
      #  run: |
      #    docker buildx imagetools inspect ${{ env.CONTAINER_NAME }}:${{ steps.meta.outputs.version }}
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Create-Release:
    runs-on: [ubuntu-latest, linux/amd64]
    needs: [Merge-Images]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Get tag
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Prepare tea
        run: |
          # Download tea from Gitea release page
          echo "Downloading Tea v${{ env.TEA_VERSION }}" && \
          wget -q -O tea https://gitea.com/gitea/tea/releases/download/v${{ env.TEA_VERSION }}/tea-${{ env.TEA_VERSION }}-linux-amd64 && \
          echo "Downloaded Tea" && \
          chmod +x tea && \
          # Login to Gitea
          echo "Logging in to Gitea using Tea" && \
          ./tea login add --name SimpleLogin --url https://git.mrmeeb.stream --token ${{ secrets.GITHUB_TOKEN }} && \
          echo "Done"
      - name: Make release
        run: |
          echo "Creating release" && \
          ./tea release create --login "SimpleLogin" --repo ${{ env.GITHUB_REPOSITORY }} --tag ${{ env.RELEASE_VERSION }} -t ${{ env.RELEASE_VERSION }} -n "Triggered by release of v${{ env.RELEASE_VERSION }} by the SimpleLogin team. <a href=\"https://github.com/simple-login/app/releases/tag/v${{ env.RELEASE_VERSION }}\" target=\"_blank\">View the changelog</a>" && \
          echo "Done"
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Notify:
    runs-on: ubuntu-latest
    needs: [Build-Image, Merge-Images, Create-Release]
    steps:
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: always()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}
README.md (10 lines changed)
@@ -1,9 +1,7 @@
-# Simple Login
+# SimpleLogin
 
-[](https://drone.mrmeeb.stream/MrMeeb/simple-login)
-
-This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks once a day, and builds the latest one automatically if it is newer than the currentlty built version.
+This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks the simplelogin/app GitHub repo once a day, and builds the latest release automatically if it is newer than the currently built version.
 
-This exists to simplify deployment of SimpleLogin in a self-hosted capacity, while also allowing the use of the latest version; SimpleLogin do not provide an up-to-date version for this use.
+I did this to simplify deployment of my self-hosted SimpleLogin instance. SimpleLogin do not provide an up-to-date version for self-hosting, leaving you with the options of either running a very outdated version with no app support, a beta version, or their `simplelogin/app-ci` version. This last option works well if you use an x86 machine, but I'm running SimpleLogin on an ARM machine. Since I don't want to have to build containers on the machine itself, this repo handles that for me.
 
-As a result, this image is built for both amd64 and arm64 devices.
+The image is built for amd64 and arm64 devices.
app/.github/workflows/main.yml (vendored, 8 lines changed)
@@ -15,9 +15,15 @@ jobs:
 
       - uses: actions/setup-python@v4
         with:
-          python-version: '3.9'
+          python-version: '3.10'
+          cache: 'poetry'
 
+      - name: Install OS dependencies
+        if: ${{ matrix.python-version }} == '3.10'
+        run: |
+          sudo apt update
+          sudo apt install -y libre2-dev libpq-dev
+
       - name: Install dependencies
         if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
         run: poetry install --no-interaction
@@ -7,17 +7,19 @@ repos:
     hooks:
       - id: check-yaml
       - id: trailing-whitespace
-  - repo: https://github.com/psf/black
-    rev: 22.3.0
-    hooks:
-      - id: black
-  - repo: https://github.com/pycqa/flake8
-    rev: 3.9.2
-    hooks:
-      - id: flake8
   - repo: https://github.com/Riverside-Healthcare/djLint
     rev: v1.3.0
     hooks:
       - id: djlint-jinja
         files: '.*\.html'
         entry: djlint --reformat
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.1.5
+    hooks:
+      # Run the linter.
+      - id: ruff
+        args: [ --fix ]
+      # Run the formatter.
+      - id: ruff-format
@@ -34,7 +34,7 @@ poetry install
 On Mac, sometimes you might need to install some other packages via `brew`:
 
 ```bash
-brew install pkg-config libffi openssl postgresql
+brew install pkg-config libffi openssl postgresql@13
 ```
 
 You also need to install `gpg` tool, on Mac it can be done with:

@@ -169,6 +169,12 @@ For HTML templates, we use `djlint`. Before creating a pull request, please run
 poetry run djlint --check templates
 ```
 
+If some files aren't properly formatted, you can format all files with
+
+```bash
+poetry run djlint --reformat .
+```
+
 ## Test sending email
 
 [swaks](http://www.jetmore.org/john/code/swaks/) is used for sending test emails to the `email_handler`.
@@ -23,15 +23,15 @@ COPY poetry.lock pyproject.toml ./
 
 # Install and setup poetry
 RUN pip install -U pip \
     && apt-get update \
-    && apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
+    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
     && curl -sSL https://install.python-poetry.org | python3 - \
     # Remove curl and netcat from the image
-    && apt-get purge -y curl netcat \
+    && apt-get purge -y curl netcat-traditional \
     # Run poetry
     && poetry config virtualenvs.create false \
     && poetry install --no-interaction --no-ansi --no-root \
     # Clear apt cache \
-    && apt-get purge -y libre2-dev \
+    && apt-get purge -y libre2-dev cmake ninja-build\
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 
@@ -74,7 +74,7 @@ Setting up DKIM is highly recommended to reduce the chance your emails ending up
 First you need to generate a private and public key for DKIM:
 
 ```bash
-openssl genrsa -out dkim.key 1024
+openssl genrsa -out dkim.key -traditional 1024
 openssl rsa -in dkim.key -pubout -out dkim.pub.key
 ```
 
@@ -510,11 +510,14 @@ server {
     server_name app.mydomain.com;
 
     location / {
-        proxy_pass http://localhost:7777;
+        proxy_pass http://localhost:7777;
+        proxy_set_header Host $host;
     }
 }
 ```
 
+Note: If `/etc/nginx/sites-enabled/default` exists, delete it or certbot will fail due to the conflict. The `simplelogin` file should be the only file in `sites-enabled`.
+
 Reload Nginx with the command below
 
 ```bash
@ -5,17 +5,23 @@ from typing import Optional
|
||||
|
||||
from arrow import Arrow
|
||||
from newrelic import agent
|
||||
from sqlalchemy import or_
|
||||
|
||||
from app.db import Session
|
||||
from app.email_utils import send_welcome_email
|
||||
from app.utils import sanitize_email
|
||||
from app.errors import AccountAlreadyLinkedToAnotherPartnerException
|
||||
from app.utils import sanitize_email, canonicalize_email
|
||||
from app.errors import (
|
||||
AccountAlreadyLinkedToAnotherPartnerException,
|
||||
AccountIsUsingAliasAsEmail,
|
||||
AccountAlreadyLinkedToAnotherUserException,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
PartnerSubscription,
|
||||
Partner,
|
||||
PartnerUser,
|
||||
User,
|
||||
Alias,
|
||||
)
|
||||
from app.utils import random_string
|
||||
|
||||
@ -126,8 +132,9 @@ class ClientMergeStrategy(ABC):
|
||||
class NewUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
# Will create a new SL User with a random password
|
||||
canonical_email = canonicalize_email(self.link_request.email)
|
||||
new_user = User.create(
|
||||
email=self.link_request.email,
|
||||
email=canonical_email,
|
||||
name=self.link_request.name,
|
||||
password=random_string(20),
|
||||
activated=True,
|
||||
@ -161,7 +168,8 @@ class NewUserStrategy(ClientMergeStrategy):
|
||||
|
||||
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
|
||||
# IF it was scheduled to be deleted. Unschedule it.
|
||||
self.user.delete_on = None
|
||||
partner_user = ensure_partner_user_exists_for_user(
|
||||
self.link_request, self.user, self.partner
|
||||
)
|
||||
@ -175,7 +183,7 @@ class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
|
||||
|
||||
class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
raise AccountAlreadyLinkedToAnotherPartnerException()
|
||||
raise AccountAlreadyLinkedToAnotherUserException()
|
||||
|
||||
|
||||
def get_login_strategy(
|
||||
@ -192,6 +200,12 @@ def get_login_strategy(
|
||||
return ExistingUnlinkedUserStrategy(link_request, user, partner)
|
||||
|
||||
|
||||
def check_alias(email: str) -> bool:
|
||||
alias = Alias.get_by(email=email)
|
||||
if alias is not None:
|
||||
raise AccountIsUsingAliasAsEmail()
|
||||
|
||||
|
||||
def process_login_case(
|
||||
link_request: PartnerLinkRequest, partner: Partner
|
||||
) -> LinkResult:
|
||||
@ -202,9 +216,21 @@ def process_login_case(
|
||||
partner_id=partner.id, external_user_id=link_request.external_user_id
|
||||
)
|
||||
if partner_user is None:
|
||||
canonical_email = canonicalize_email(link_request.email)
|
||||
# We didn't find any SimpleLogin user registered with that partner user id
|
||||
# Make sure they aren't using an alias as their link email
|
||||
check_alias(link_request.email)
|
||||
check_alias(canonical_email)
|
||||
# Try to find it using the partner's e-mail address
|
||||
user = User.get_by(email=link_request.email)
|
||||
users = User.filter(
|
||||
or_(User.email == link_request.email, User.email == canonical_email)
|
||||
).all()
|
||||
if len(users) > 1:
|
||||
user = [user for user in users if user.email == canonical_email][0]
|
||||
elif len(users) == 1:
|
||||
user = users[0]
|
||||
else:
|
||||
user = None
|
||||
return get_login_strategy(link_request, user, partner).process()
|
||||
else:
|
||||
# We found the SL user registered with that partner user id
|
||||
@ -222,6 +248,8 @@ def link_user(
|
||||
) -> LinkResult:
|
||||
# Sanitize email just in case
|
||||
link_request.email = sanitize_email(link_request.email)
|
||||
# If it was scheduled to be deleted. Unschedule it.
|
||||
current_user.delete_on = None
|
||||
partner_user = ensure_partner_user_exists_for_user(
|
||||
link_request, current_user, partner
|
||||
)
|
||||
|
@ -214,6 +214,20 @@ class UserAdmin(SLModelView):
|
||||
|
||||
Session.commit()
|
||||
|
||||
@action(
|
||||
"remove trial",
|
||||
"Stop trial period",
|
||||
"Remove trial for this user?",
|
||||
)
|
||||
def stop_trial(self, ids):
|
||||
for user in User.filter(User.id.in_(ids)):
|
||||
user.trial_end = None
|
||||
|
||||
flash(f"Stopped trial for {user}", "success")
|
||||
AdminAuditLog.stop_trial(current_user.id, user.id)
|
||||
|
||||
Session.commit()
|
||||
|
||||
@action(
|
||||
"disable_otp_fido",
|
||||
"Disable OTP & FIDO",
|
||||
@ -256,6 +270,17 @@ class UserAdmin(SLModelView):
|
||||
|
||||
Session.commit()
|
||||
|
||||
@action(
|
||||
"clear_delete_on",
|
||||
"Remove scheduled deletion of user",
|
||||
"This will remove the scheduled deletion for this users",
|
||||
)
|
||||
def clean_delete_on(self, ids):
|
||||
for user in User.filter(User.id.in_(ids)):
|
||||
user.delete_on = None
|
||||
|
||||
Session.commit()
|
||||
|
||||
# @action(
|
||||
# "login_as",
|
||||
# "Login as this user",
|
||||
@ -600,6 +625,26 @@ class NewsletterAdmin(SLModelView):
|
||||
else:
|
||||
flash(error_msg, "error")
|
||||
|
||||
@action(
|
||||
"clone_newsletter",
|
||||
"Clone this newsletter",
|
||||
)
|
||||
def clone_newsletter(self, newsletter_ids):
|
||||
if len(newsletter_ids) != 1:
|
||||
flash("you can only select 1 newsletter", "error")
|
||||
return
|
||||
|
||||
newsletter_id = newsletter_ids[0]
|
||||
newsletter: Newsletter = Newsletter.get(newsletter_id)
|
||||
new_newsletter = Newsletter.create(
|
||||
subject=newsletter.subject,
|
||||
html=newsletter.html,
|
||||
plain_text=newsletter.plain_text,
|
||||
commit=True,
|
||||
)
|
||||
|
||||
flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")
|
||||
|
||||
|
||||
class NewsletterUserAdmin(SLModelView):
|
||||
column_searchable_list = ["id"]
|
||||
|
@ -6,8 +6,7 @@ from typing import Optional
|
||||
import itsdangerous
|
||||
from app import config
|
||||
from app.log import LOG
|
||||
from app.models import User
|
||||
|
||||
from app.models import User, AliasOptions, SLDomain
|
||||
|
||||
signer = itsdangerous.TimestampSigner(config.CUSTOM_ALIAS_SECRET)
|
||||
|
||||
@ -43,7 +42,9 @@ def check_suffix_signature(signed_suffix: str) -> Optional[str]:
|
||||
return None
|
||||
|
||||
|
||||
def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
|
||||
def verify_prefix_suffix(
|
||||
user: User, alias_prefix, alias_suffix, alias_options: Optional[AliasOptions] = None
|
||||
) -> bool:
|
||||
"""verify if user could create an alias with the given prefix and suffix"""
|
||||
if not alias_prefix or not alias_suffix: # should be caught on frontend
|
||||
return False
|
||||
@ -56,7 +57,7 @@ def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
|
||||
alias_domain_prefix, alias_domain = alias_suffix.split("@", 1)
|
||||
|
||||
# alias_domain must be either one of user custom domains or built-in domains
|
||||
if alias_domain not in user.available_alias_domains():
|
||||
if alias_domain not in user.available_alias_domains(alias_options=alias_options):
|
||||
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
|
||||
return False
|
||||
|
||||
@ -64,12 +65,11 @@ def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
|
||||
# 1) alias_suffix must start with "." and
|
||||
# 2) alias_domain_prefix must come from the word list
|
||||
if (
|
||||
alias_domain in user.available_sl_domains()
|
||||
alias_domain in user.available_sl_domains(alias_options=alias_options)
|
||||
and alias_domain not in user_custom_domains
|
||||
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
|
||||
and not config.DISABLE_ALIAS_SUFFIX
|
||||
):
|
||||
|
||||
if not alias_domain_prefix.startswith("."):
|
||||
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
|
||||
return False
|
||||
@ -80,14 +80,18 @@ def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
|
||||
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
|
||||
return False
|
||||
|
||||
if alias_domain not in user.available_sl_domains():
|
||||
if alias_domain not in user.available_sl_domains(
|
||||
alias_options=alias_options
|
||||
):
|
||||
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_alias_suffixes(user: User) -> [AliasSuffix]:
|
||||
def get_alias_suffixes(
|
||||
user: User, alias_options: Optional[AliasOptions] = None
|
||||
) -> [AliasSuffix]:
|
||||
"""
|
||||
Similar to as get_available_suffixes() but also return custom domain that doesn't have MX set up.
|
||||
"""
|
||||
@ -99,7 +103,9 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
|
||||
# for each user domain, generate both the domain and a random suffix version
|
||||
for custom_domain in user_custom_domains:
|
||||
if custom_domain.random_prefix_generation:
|
||||
suffix = "." + user.get_random_alias_suffix() + "@" + custom_domain.domain
|
||||
suffix = (
|
||||
f".{user.get_random_alias_suffix(custom_domain)}@{custom_domain.domain}"
|
||||
)
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=True,
|
||||
suffix=suffix,
|
||||
@ -113,7 +119,7 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
|
||||
else:
|
||||
alias_suffixes.append(alias_suffix)
|
||||
|
||||
suffix = "@" + custom_domain.domain
|
||||
suffix = f"@{custom_domain.domain}"
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=True,
|
||||
suffix=suffix,
|
||||
@ -134,16 +140,13 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
|
||||
alias_suffixes.append(alias_suffix)
|
||||
|
||||
# then SimpleLogin domain
|
||||
for sl_domain in user.get_sl_domains():
|
||||
suffix = (
|
||||
(
|
||||
""
|
||||
if config.DISABLE_ALIAS_SUFFIX
|
||||
else "." + user.get_random_alias_suffix()
|
||||
)
|
||||
+ "@"
|
||||
+ sl_domain.domain
|
||||
sl_domains = user.get_sl_domains(alias_options=alias_options)
|
||||
default_domain_found = False
|
||||
for sl_domain in sl_domains:
|
||||
prefix = (
|
||||
"" if config.DISABLE_ALIAS_SUFFIX else f".{user.get_random_alias_suffix()}"
|
||||
)
|
||||
suffix = f"{prefix}@{sl_domain.domain}"
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=False,
|
||||
suffix=suffix,
|
||||
@ -152,11 +155,36 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
|
||||
domain=sl_domain.domain,
|
||||
mx_verified=True,
|
||||
)
|
||||
|
||||
# put the default domain to top
|
||||
if user.default_alias_public_domain_id == sl_domain.id:
|
||||
alias_suffixes.insert(0, alias_suffix)
|
||||
else:
|
||||
# No default or this is not the default
|
||||
if (
|
||||
user.default_alias_public_domain_id is None
|
||||
or user.default_alias_public_domain_id != sl_domain.id
|
||||
):
|
||||
alias_suffixes.append(alias_suffix)
|
||||
else:
|
||||
default_domain_found = True
|
||||
alias_suffixes.insert(0, alias_suffix)
|
||||
|
||||
if not default_domain_found:
|
||||
domain_conditions = {"id": user.default_alias_public_domain_id, "hidden": False}
|
||||
if not user.is_premium():
|
||||
domain_conditions["premium_only"] = False
|
||||
sl_domain = SLDomain.get_by(**domain_conditions)
|
||||
if sl_domain:
|
||||
prefix = (
|
||||
""
|
||||
if config.DISABLE_ALIAS_SUFFIX
|
||||
else f".{user.get_random_alias_suffix()}"
|
||||
)
|
||||
suffix = f"{prefix}@{sl_domain.domain}"
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=False,
|
||||
suffix=suffix,
|
||||
signed_suffix=signer.sign(suffix).decode(),
|
||||
is_premium=sl_domain.premium_only,
|
||||
domain=sl_domain.domain,
|
||||
mx_verified=True,
|
||||
)
|
||||
alias_suffixes.insert(0, alias_suffix)
|
||||
|
||||
return alias_suffixes
|
||||
|
@ -21,6 +21,8 @@ from app.email_utils import (
|
||||
send_cannot_create_directory_alias_disabled,
|
||||
get_email_local_part,
|
||||
send_cannot_create_domain_alias,
|
||||
send_email,
|
||||
render,
|
||||
)
|
||||
from app.errors import AliasInTrashError
|
||||
from app.log import LOG
|
||||
@ -36,6 +38,8 @@ from app.models import (
|
||||
EmailLog,
|
||||
Contact,
|
||||
AutoCreateRule,
|
||||
AliasUsedOn,
|
||||
ClientUser,
|
||||
)
|
||||
from app.regex_utils import regex_match
|
||||
|
||||
@ -57,6 +61,8 @@ def get_user_if_alias_would_auto_create(
|
||||
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
|
||||
address, notify_user=notify_user
|
||||
)
|
||||
if DomainDeletedAlias.get_by(email=address):
|
||||
return None
|
||||
if domain_and_rule:
|
||||
return domain_and_rule[0].user
|
||||
directory = check_if_alias_can_be_auto_created_for_a_directory(
|
||||
@ -397,3 +403,58 @@ def alias_export_csv(user, csv_direct_export=False):
|
||||
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
|
||||
output.headers["Content-type"] = "text/csv"
|
||||
return output
|
||||
|
||||
|
||||
def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
|
||||
# cannot transfer alias which is used for receiving newsletter
|
||||
if User.get_by(newsletter_alias_id=alias.id):
|
||||
raise Exception("Cannot transfer alias that's used to receive newsletter")
|
||||
|
||||
# update user_id
|
||||
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
# remove existing mailboxes from the alias
|
||||
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
|
||||
|
||||
# set mailboxes
|
||||
alias.mailbox_id = new_mailboxes.pop().id
|
||||
for mb in new_mailboxes:
|
||||
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
|
||||
|
||||
# alias has never been transferred before
|
||||
if not alias.original_owner_id:
|
||||
alias.original_owner_id = alias.user_id
|
||||
|
||||
# inform previous owner
|
||||
old_user = alias.user
|
||||
send_email(
|
||||
old_user.email,
|
||||
f"Alias {alias.email} has been received",
|
||||
render(
|
||||
"transactional/alias-transferred.txt",
|
||||
alias=alias,
|
||||
),
|
||||
render(
|
||||
"transactional/alias-transferred.html",
|
||||
alias=alias,
|
||||
),
|
||||
)
|
||||
|
||||
# now the alias belongs to the new user
|
||||
alias.user_id = new_user.id
|
||||
|
||||
# set some fields back to default
|
||||
alias.disable_pgp = False
|
||||
alias.pinned = False
|
||||
|
||||
Session.commit()
|
||||
|
@ -16,3 +16,22 @@ from .views import (
|
||||
sudo,
|
||||
user,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"alias_options",
|
||||
"new_custom_alias",
|
||||
"custom_domain",
|
||||
"new_random_alias",
|
||||
"user_info",
|
||||
"auth",
|
||||
"auth_mfa",
|
||||
"alias",
|
||||
"apple",
|
||||
"mailbox",
|
||||
"notification",
|
||||
"setting",
|
||||
"export",
|
||||
"phone",
|
||||
"sudo",
|
||||
"user",
|
||||
]
|
||||
|
@@ -33,6 +33,9 @@ def authorize_request() -> Optional[Tuple[str, int]]:
     if g.user.disabled:
         return jsonify(error="Disabled account"), 403
 
+    if not g.user.is_active():
+        return jsonify(error="Account does not exist"), 401
+
     g.api_key = api_key
 
     return None
@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
|
||||
q = q.order_by(Alias.pinned.desc())
|
||||
q = q.order_by(latest_activity.desc())
|
||||
|
||||
q = list(q.limit(page_limit).offset(page_id * page_size))
|
||||
q = q.limit(page_limit).offset(page_id * page_size)
|
||||
|
||||
ret = []
|
||||
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
|
||||
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
|
||||
ret.append(
|
||||
AliasInfo(
|
||||
alias=alias,
|
||||
@ -358,7 +358,6 @@ def construct_alias_query(user: User):
|
||||
else_=0,
|
||||
)
|
||||
).label("nb_forward"),
|
||||
func.max(EmailLog.created_at).label("latest_email_log_created_at"),
|
||||
)
|
||||
.join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
|
||||
.filter(Alias.user_id == user.id)
|
||||
@ -366,14 +365,6 @@ def construct_alias_query(user: User):
|
||||
.subquery()
|
||||
)
|
||||
|
||||
alias_contact_subquery = (
|
||||
Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
|
||||
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
|
||||
.filter(Alias.user_id == user.id)
|
||||
.group_by(Alias.id)
|
||||
.subquery()
|
||||
)
|
||||
|
||||
return (
|
||||
Session.query(
|
||||
Alias,
|
||||
@ -385,23 +376,7 @@ def construct_alias_query(user: User):
|
||||
)
|
||||
.options(joinedload(Alias.hibp_breaches))
|
||||
.options(joinedload(Alias.custom_domain))
|
||||
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
|
||||
.join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
|
||||
.join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
|
||||
.join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
|
||||
.filter(Alias.id == alias_activity_subquery.c.id)
|
||||
.filter(Alias.id == alias_contact_subquery.c.id)
|
||||
.filter(
|
||||
or_(
|
||||
EmailLog.created_at
|
||||
== alias_activity_subquery.c.latest_email_log_created_at,
|
||||
and_(
|
||||
# no email log yet for this alias
|
||||
alias_activity_subquery.c.latest_email_log_created_at.is_(None),
|
||||
# to make sure only 1 contact is returned in this case
|
||||
or_(
|
||||
Contact.id == alias_contact_subquery.c.max_contact_id,
|
||||
alias_contact_subquery.c.max_contact_id.is_(None),
|
||||
),
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
@@ -24,12 +24,14 @@ from app.errors import (
     ErrContactAlreadyExists,
     ErrAddressInvalid,
 )
+from app.extensions import limiter
 from app.models import Alias, Contact, Mailbox, AliasMailbox
 
 
 @deprecated
 @api_bp.route("/aliases", methods=["GET", "POST"])
 @require_api_auth
+@limiter.limit("10/minute", key_func=lambda: g.user.id)
 def get_aliases():
     """
     Get aliases

@@ -72,6 +74,7 @@ def get_aliases():
 
 @api_bp.route("/v2/aliases", methods=["GET", "POST"])
 @require_api_auth
+@limiter.limit("50/minute", key_func=lambda: g.user.id)
 def get_aliases_v2():
     """
     Get aliases
@ -9,6 +9,7 @@ from requests import RequestException
|
||||
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.config import APPLE_API_SECRET, MACAPP_APPLE_API_SECRET
|
||||
from app.subscription_webhook import execute_subscription_webhook
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import PlanEnum, AppleSubscription
|
||||
@ -16,9 +17,14 @@ from app.models import PlanEnum, AppleSubscription
|
||||
_MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
|
||||
_YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"
|
||||
|
||||
# SL Mac app used to be in SL account
|
||||
_MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
|
||||
_MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"
|
||||
|
||||
# SL Mac app is moved to Proton account
|
||||
_MACAPP_MONTHLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.monthly"
|
||||
_MACAPP_YEARLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.yearly"
|
||||
|
||||
# Apple API URL
|
||||
_SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
|
||||
_PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
|
||||
@ -50,6 +56,7 @@ def apple_process_payment():
|
||||
|
||||
apple_sub = verify_receipt(receipt_data, user, password)
|
||||
if apple_sub:
|
||||
execute_subscription_webhook(user)
|
||||
return jsonify(ok=True), 200
|
||||
|
||||
return jsonify(error="Processing failed"), 400
|
||||
@ -261,7 +268,11 @@ def apple_update_notification():
|
||||
plan = (
|
||||
PlanEnum.monthly
|
||||
if transaction["product_id"]
|
||||
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
|
||||
in (
|
||||
_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
|
||||
)
|
||||
else PlanEnum.yearly
|
||||
)
|
||||
|
||||
@ -282,6 +293,7 @@ def apple_update_notification():
|
||||
apple_sub.plan = plan
|
||||
apple_sub.product_id = transaction["product_id"]
|
||||
Session.commit()
|
||||
execute_subscription_webhook(user)
|
||||
return jsonify(ok=True), 200
|
||||
else:
|
||||
LOG.w(
|
||||
@ -514,7 +526,11 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
||||
plan = (
|
||||
PlanEnum.monthly
|
||||
if latest_transaction["product_id"]
|
||||
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
|
||||
in (
|
||||
_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
|
||||
)
|
||||
else PlanEnum.yearly
|
||||
)
|
||||
|
||||
@ -554,6 +570,7 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
||||
product_id=latest_transaction["product_id"],
|
||||
)
|
||||
|
||||
execute_subscription_webhook(user)
|
||||
Session.commit()
|
||||
|
||||
return apple_sub
|
||||
|
@ -11,7 +11,7 @@ from itsdangerous import Signer
|
||||
from app import email_utils
|
||||
from app.api.base import api_bp
|
||||
from app.config import FLASK_SECRET, DISABLE_REGISTRATION
|
||||
from app.dashboard.views.setting import send_reset_password_email
|
||||
from app.dashboard.views.account_setting import send_reset_password_email
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
@ -63,6 +63,11 @@ def auth_login():
|
||||
elif user.disabled:
|
||||
LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Account disabled"), 400
|
||||
elif user.delete_on is not None:
|
||||
LoginEvent(
|
||||
LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
|
||||
).send()
|
||||
return jsonify(error="Account scheduled for deletion"), 400
|
||||
elif not user.activated:
|
||||
LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Account not activated"), 422
|
||||
@ -357,7 +362,7 @@ def auth_payload(user, device) -> dict:
|
||||
|
||||
|
||||
@api_bp.route("/auth/forgot_password", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
@limiter.limit("2/minute")
|
||||
def forgot_password():
|
||||
"""
|
||||
User forgot password
|
||||
|
@@ -13,8 +13,8 @@ from app.db import Session
 from app.email_utils import (
     mailbox_already_used,
     email_can_be_used_as_mailbox,
-    is_valid_email,
 )
+from app.email_validation import is_valid_email
 from app.log import LOG
 from app.models import Mailbox, Job
 from app.utils import sanitize_email

@@ -45,7 +45,7 @@ def create_mailbox():
     mailbox_email = sanitize_email(request.get_json().get("email"))
 
     if not user.is_premium():
-        return jsonify(error=f"Only premium plan can add additional mailbox"), 400
+        return jsonify(error="Only premium plan can add additional mailbox"), 400
 
     if not is_valid_email(mailbox_email):
         return jsonify(error=f"{mailbox_email} invalid"), 400
@@ -150,7 +150,7 @@ def new_custom_alias_v3():
     if not data:
         return jsonify(error="request body cannot be empty"), 400
 
-    if type(data) is not dict:
+    if not isinstance(data, dict):
         return jsonify(error="request body does not follow the required format"), 400
 
     alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")

@@ -168,7 +168,7 @@ def new_custom_alias_v3():
         return jsonify(error="alias prefix invalid format or too long"), 400
 
     # check if mailbox is not tempered with
-    if type(mailbox_ids) is not list:
+    if not isinstance(mailbox_ids, list):
         return jsonify(error="mailbox_ids must be an array of id"), 400
     mailboxes = []
     for mailbox_id in mailbox_ids:
@ -1,4 +1,5 @@
|
||||
import base64
|
||||
import dataclasses
|
||||
from io import BytesIO
|
||||
from typing import Optional
|
||||
|
||||
@ -7,6 +8,7 @@ from flask import jsonify, g, request, make_response
|
||||
from app import s3, config
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.config import SESSION_COOKIE_NAME
|
||||
from app.dashboard.views.index import get_stats
|
||||
from app.db import Session
|
||||
from app.models import ApiKey, File, PartnerUser, User
|
||||
from app.proton.utils import get_proton_partner
|
||||
@ -30,6 +32,7 @@ def user_to_dict(user: User) -> dict:
|
||||
"in_trial": user.in_trial(),
|
||||
"max_alias_free_plan": user.max_alias_for_free_account(),
|
||||
"connected_proton_address": None,
|
||||
"can_create_reverse_alias": user.can_create_contacts(),
|
||||
}
|
||||
|
||||
if config.CONNECT_WITH_PROTON:
|
||||
@ -56,6 +59,7 @@ def user_info():
|
||||
- in_trial
|
||||
- max_alias_free
|
||||
- is_connected_with_proton
|
||||
- can_create_reverse_alias
|
||||
"""
|
||||
user = g.user
|
||||
|
||||
@ -136,3 +140,22 @@ def logout():
|
||||
response.delete_cookie(SESSION_COOKIE_NAME)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@api_bp.route("/stats")
|
||||
@require_api_auth
|
||||
def user_stats():
|
||||
"""
|
||||
Return stats
|
||||
|
||||
Output as json
|
||||
- nb_alias
|
||||
- nb_forward
|
||||
- nb_reply
|
||||
- nb_block
|
||||
|
||||
"""
|
||||
user = g.user
|
||||
stats = get_stats(user)
|
||||
|
||||
return jsonify(dataclasses.asdict(stats))
|
||||
|
@ -17,3 +17,23 @@ from .views import (
|
||||
recovery,
|
||||
api_to_cookie,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"login",
|
||||
"logout",
|
||||
"register",
|
||||
"activate",
|
||||
"resend_activation",
|
||||
"reset_password",
|
||||
"forgot_password",
|
||||
"github",
|
||||
"google",
|
||||
"facebook",
|
||||
"proton",
|
||||
"change_email",
|
||||
"mfa",
|
||||
"fido",
|
||||
"social",
|
||||
"recovery",
|
||||
"api_to_cookie",
|
||||
]
|
||||
|
@ -3,6 +3,7 @@ from flask_login import login_user
|
||||
|
||||
from app.auth.base import auth_bp
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import EmailChange, ResetPasswordCode
|
||||
|
||||
|
||||
@ -22,12 +23,14 @@ def change_email():
|
||||
return render_template("auth/change_email.html")
|
||||
|
||||
user = email_change.user
|
||||
old_email = user.email
|
||||
user.email = email_change.new_email
|
||||
|
||||
EmailChange.delete(email_change.id)
|
||||
ResetPasswordCode.filter_by(user_id=user.id).delete()
|
||||
Session.commit()
|
||||
|
||||
LOG.i(f"User {user} has changed their email from {old_email} to {user.email}")
|
||||
flash("Your new email has been updated", "success")
|
||||
|
||||
login_user(user)
|
||||
|
@@ -62,7 +62,7 @@ def fido():
     browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
     if browser and not browser.is_expired() and browser.user_id == user.id:
         login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
         # Redirect user to correct page
         return redirect(next_url or url_for("dashboard.index"))
     else:

@@ -110,7 +110,7 @@ def fido():
 
     session["sudo_time"] = int(time())
     login_user(user)
-    flash(f"Welcome back!", "success")
+    flash("Welcome back!", "success")
 
     # Redirect user to correct page
     response = make_response(redirect(next_url or url_for("dashboard.index")))
@@ -1,9 +1,9 @@
-from flask import request, render_template, redirect, url_for, flash, g
+from flask import request, render_template, flash, g
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators
 
 from app.auth.base import auth_bp
-from app.dashboard.views.setting import send_reset_password_email
+from app.dashboard.views.account_setting import send_reset_password_email
 from app.extensions import limiter
 from app.log import LOG
 from app.models import User

@@ -16,7 +16,7 @@ class ForgotPasswordForm(FlaskForm):
 
 @auth_bp.route("/forgot_password", methods=["GET", "POST"])
 @limiter.limit(
-    "10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
+    "10/hour", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
 )
 def forgot_password():
     form = ForgotPasswordForm(request.form)

@@ -37,6 +37,5 @@ def forgot_password():
     if user:
         LOG.d("Send forgot password email to %s", user)
         send_reset_password_email(user)
-        return redirect(url_for("auth.forgot_password"))
 
     return render_template("auth/forgot_password.html", form=form)
@@ -7,7 +7,7 @@ from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
 from app.db import Session
 from app.log import LOG
 from app.models import User, File, SocialAuth
-from app.utils import random_string, sanitize_email
+from app.utils import random_string, sanitize_email, sanitize_next_url
 from .login_utils import after_login
 
 _authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"

@@ -29,7 +29,7 @@ def google_login():
     # to avoid flask-login displaying the login error message
     session.pop("_flashes", None)
 
-    next_url = request.args.get("next")
+    next_url = sanitize_next_url(request.args.get("next"))
 
     # Google does not allow to append param to redirect_url
     # we need to pass the next url by session
@@ -54,6 +54,12 @@ def login():
             "error",
         )
         LoginEvent(LoginEvent.ActionType.disabled_login).send()
+    elif user.delete_on is not None:
+        flash(
+            f"Your account is scheduled to be deleted on {user.delete_on}",
+            "error",
+        )
+        LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
     elif not user.activated:
         show_resend_activation = True
         flash(
@@ -55,7 +55,7 @@ def mfa():
     browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
     if browser and not browser.is_expired() and browser.user_id == user.id:
         login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
         # Redirect user to correct page
         return redirect(next_url or url_for("dashboard.index"))
     else:

@@ -73,7 +73,7 @@ def mfa():
     Session.commit()
 
     login_user(user)
-    flash(f"Welcome back!", "success")
+    flash("Welcome back!", "success")
 
     # Redirect user to correct page
     response = make_response(redirect(next_url or url_for("dashboard.index")))
@@ -53,7 +53,7 @@ def recovery_route():
         del session[MFA_USER_ID]
 
         login_user(user)
-        flash(f"Welcome back!", "success")
+        flash("Welcome back!", "success")
 
         recovery_code.used = True
         recovery_code.used_at = arrow.now()
@@ -94,9 +94,7 @@ def register():
     try:
         send_activation_email(user, next_url)
         RegisterEvent(RegisterEvent.ActionType.success).send()
-        DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
-            1
-        )
+        DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
         Session.commit()
     except Exception:
         flash("Invalid email, are you sure the email is correct?", "error")
@@ -60,8 +60,8 @@ def reset_password():
     # this can be served to activate user too
     user.activated = True
 
-    # remove the reset password code
-    ResetPasswordCode.delete(reset_password_code.id)
+    # remove all reset password codes
+    ResetPasswordCode.filter_by(user_id=user.id).delete()
 
     # change the alternative_id to log user out on other browsers
     user.alternative_id = str(uuid.uuid4())
@ -179,6 +179,7 @@ AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
|
||||
BUCKET = os.environ.get("BUCKET")
|
||||
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
|
||||
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
|
||||
AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)
|
||||
|
||||
# Paddle
|
||||
try:
|
||||
@ -357,6 +358,7 @@ ALERT_COMPLAINT_TRANSACTIONAL_PHASE = "alert_complaint_transactional_phase"
|
||||
ALERT_QUARANTINE_DMARC = "alert_quarantine_dmarc"
|
||||
|
||||
ALERT_DUAL_SUBSCRIPTION_WITH_PARTNER = "alert_dual_sub_with_partner"
|
||||
ALERT_WARN_MULTIPLE_SUBSCRIPTIONS = "alert_multiple_subscription"
|
||||
|
||||
# <<<<< END ALERT EMAIL >>>>
|
||||
|
||||
@ -419,6 +421,8 @@ try:
|
||||
except Exception:
|
||||
HIBP_SCAN_INTERVAL_DAYS = 7
|
||||
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
|
||||
HIBP_MAX_ALIAS_CHECK = 10_000
|
||||
HIBP_RPM = 100
|
||||
|
||||
POSTMASTER = os.environ.get("POSTMASTER")
|
||||
|
||||
@ -487,7 +491,34 @@ def setup_nameservers():
|
||||
|
||||
NAMESERVERS = setup_nameservers()
|
||||
|
||||
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = False
|
||||
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = os.environ.get(
|
||||
"DISABLE_CREATE_CONTACTS_FOR_FREE_USERS", False
|
||||
)
|
||||
|
||||
|
||||
# Expect format hits,seconds:hits,seconds...
|
||||
# Example 1,10:4,60 means 1 in the last 10 secs or 4 in the last 60 secs
|
||||
def getRateLimitFromConfig(
|
||||
env_var: string, default: string = ""
|
||||
) -> list[tuple[int, int]]:
|
||||
value = os.environ.get(env_var, default)
|
||||
if not value:
|
||||
return []
|
||||
entries = [entry for entry in value.split(":")]
|
||||
limits = []
|
||||
for entry in entries:
|
||||
fields = entry.split(",")
|
||||
limit = (int(fields[0]), int(fields[1]))
|
||||
limits.append(limit)
|
||||
return limits
|
||||
|
||||
|
||||
ALIAS_CREATE_RATE_LIMIT_FREE = getRateLimitFromConfig(
|
||||
"ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600"
|
||||
)
|
||||
ALIAS_CREATE_RATE_LIMIT_PAID = getRateLimitFromConfig(
|
||||
"ALIAS_CREATE_RATE_LIMIT_PAID", "50,900:200,3600"
|
||||
)
|
||||
PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
|
||||
FLASK_SECRET + "partnerapitoken"
|
||||
)
|
||||
@ -531,3 +562,12 @@ if ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT:
|
||||
SKIP_MX_LOOKUP_ON_CHECK = False
|
||||
|
||||
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
||||
|
||||
SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
|
||||
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
||||
|
||||
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
||||
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
|
||||
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
|
||||
|
||||
STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ
|
||||
|
@ -32,4 +32,42 @@ from .views import (
|
||||
delete_account,
|
||||
notification,
|
||||
support,
|
||||
account_setting,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"index",
|
||||
"pricing",
|
||||
"setting",
|
||||
"custom_alias",
|
||||
"subdomain",
|
||||
"billing",
|
||||
"alias_log",
|
||||
"alias_export",
|
||||
"unsubscribe",
|
||||
"api_key",
|
||||
"custom_domain",
|
||||
"alias_contact_manager",
|
||||
"enter_sudo",
|
||||
"mfa_setup",
|
||||
"mfa_cancel",
|
||||
"fido_setup",
|
||||
"coupon",
|
||||
"fido_manage",
|
||||
"domain_detail",
|
||||
"lifetime_licence",
|
||||
"directory",
|
||||
"mailbox",
|
||||
"mailbox_detail",
|
||||
"refused_email",
|
||||
"referral",
|
||||
"contact_detail",
|
||||
"setup_done",
|
||||
"batch_import",
|
||||
"alias_transfer",
|
||||
"app",
|
||||
"delete_account",
|
||||
"notification",
|
||||
"support",
|
||||
"account_setting",
|
||||
]
|
||||
|
242
app/app/dashboard/views/account_setting.py
Normal file
242
app/app/dashboard/views/account_setting.py
Normal file
@ -0,0 +1,242 @@
|
||||
import arrow
|
||||
from flask import (
|
||||
render_template,
|
||||
request,
|
||||
redirect,
|
||||
url_for,
|
||||
flash,
|
||||
)
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import email_utils
|
||||
from app.config import (
|
||||
URL,
|
||||
FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
CONNECT_WITH_PROTON,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.dashboard.views.mailbox_detail import ChangeEmailForm
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
personal_email_already_used,
|
||||
)
|
||||
from app.extensions import limiter
|
||||
from app.jobs.export_user_data_job import ExportUserDataJob
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
BlockBehaviourEnum,
|
||||
PlanEnum,
|
||||
ResetPasswordCode,
|
||||
EmailChange,
|
||||
User,
|
||||
Alias,
|
||||
AliasGeneratorEnum,
|
||||
SenderFormatEnum,
|
||||
UnsubscribeBehaviourEnum,
|
)
from app.proton.utils import perform_proton_account_unlink
from app.utils import (
    random_string,
    CSRFValidationForm,
    canonicalize_email,
)


@dashboard_bp.route("/account_setting", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("5/minute", methods=["POST"])
def account_setting():
    change_email_form = ChangeEmailForm()
    csrf_form = CSRFValidationForm()

    email_change = EmailChange.get_by(user_id=current_user.id)
    if email_change:
        pending_email = email_change.new_email
    else:
        pending_email = None

    if request.method == "POST":
        if not csrf_form.validate():
            flash("Invalid request", "warning")
            return redirect(url_for("dashboard.setting"))
        if request.form.get("form-name") == "update-email":
            if change_email_form.validate():
                # whether user can proceed with the email update
                new_email_valid = True
                new_email = canonicalize_email(change_email_form.email.data)
                if new_email != current_user.email and not pending_email:
                    # check if this email is not already used
                    if personal_email_already_used(new_email) or Alias.get_by(
                        email=new_email
                    ):
                        flash(f"Email {new_email} already used", "error")
                        new_email_valid = False
                    elif not email_can_be_used_as_mailbox(new_email):
                        flash(
                            "You cannot use this email address as your personal inbox.",
                            "error",
                        )
                        new_email_valid = False
                    # a pending email change with the same email exists from another user
                    elif EmailChange.get_by(new_email=new_email):
                        other_email_change: EmailChange = EmailChange.get_by(
                            new_email=new_email
                        )
                        LOG.w(
                            "Another user has a pending %s with the same email address. Current user:%s",
                            other_email_change,
                            current_user,
                        )

                        if other_email_change.is_expired():
                            LOG.d(
                                "delete the expired email change %s", other_email_change
                            )
                            EmailChange.delete(other_email_change.id)
                            Session.commit()
                        else:
                            flash(
                                "You cannot use this email address as your personal inbox.",
                                "error",
                            )
                            new_email_valid = False

                    if new_email_valid:
                        email_change = EmailChange.create(
                            user_id=current_user.id,
                            code=random_string(
                                60
                            ),  # todo: make sure the code is unique
                            new_email=new_email,
                        )
                        Session.commit()
                        send_change_email_confirmation(current_user, email_change)
                        flash(
                            "A confirmation email is on the way, please check your inbox",
                            "success",
                        )
                        return redirect(url_for("dashboard.account_setting"))
        elif request.form.get("form-name") == "change-password":
            flash(
                "You are going to receive an email containing instructions to change your password",
                "success",
            )
            send_reset_password_email(current_user)
            return redirect(url_for("dashboard.account_setting"))
        elif request.form.get("form-name") == "send-full-user-report":
            if ExportUserDataJob(current_user).store_job_in_db():
                flash(
                    "You will receive your SimpleLogin data via email shortly",
                    "success",
                )
            else:
                flash("An export of your data is currently in progress", "error")

    partner_sub = None
    partner_name = None

    return render_template(
        "dashboard/account_setting.html",
        csrf_form=csrf_form,
        PlanEnum=PlanEnum,
        SenderFormatEnum=SenderFormatEnum,
        BlockBehaviourEnum=BlockBehaviourEnum,
        change_email_form=change_email_form,
        pending_email=pending_email,
        AliasGeneratorEnum=AliasGeneratorEnum,
        UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
        partner_sub=partner_sub,
        partner_name=partner_name,
        FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
        ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
        connect_with_proton=CONNECT_WITH_PROTON,
    )


def send_reset_password_email(user):
    """
    generate a new ResetPasswordCode and send it over email to user
    """
    # the activation code is valid for 1h
    reset_password_code = ResetPasswordCode.create(
        user_id=user.id, code=random_string(60)
    )
    Session.commit()

    reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"

    email_utils.send_reset_password_email(user.email, reset_password_link)


def send_change_email_confirmation(user: User, email_change: EmailChange):
    """
    send confirmation email to the new email address
    """

    link = f"{URL}/auth/change_email?code={email_change.code}"

    email_utils.send_change_email(email_change.new_email, user.email, link)


@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
@login_required
@sudo_required
def resend_email_change():
    form = CSRFValidationForm()
    if not form.validate():
        flash("Invalid request. Please try again", "warning")
        return redirect(url_for("dashboard.setting"))
    email_change = EmailChange.get_by(user_id=current_user.id)
    if email_change:
        # extend email change expiration
        email_change.expired = arrow.now().shift(hours=12)
        Session.commit()

        send_change_email_confirmation(current_user, email_change)
        flash("A confirmation email is on the way, please check your inbox", "success")
        return redirect(url_for("dashboard.setting"))
    else:
        flash(
            "You have no pending email change. Redirect back to Setting page", "warning"
        )
        return redirect(url_for("dashboard.setting"))


@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
@sudo_required
def cancel_email_change():
    form = CSRFValidationForm()
    if not form.validate():
        flash("Invalid request. Please try again", "warning")
        return redirect(url_for("dashboard.setting"))
    email_change = EmailChange.get_by(user_id=current_user.id)
    if email_change:
        EmailChange.delete(email_change.id)
        Session.commit()
        flash("Your email change is cancelled", "success")
        return redirect(url_for("dashboard.setting"))
    else:
        flash(
            "You have no pending email change. Redirect back to Setting page", "warning"
        )
        return redirect(url_for("dashboard.setting"))


@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
@login_required
@sudo_required
def unlink_proton_account():
    csrf_form = CSRFValidationForm()
    if not csrf_form.validate():
        flash("Invalid request", "warning")
        return redirect(url_for("dashboard.setting"))

    perform_proton_account_unlink(current_user)
    flash("Your Proton account has been unlinked", "success")
    return redirect(url_for("dashboard.setting"))
|
@@ -13,10 +13,10 @@ from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
    is_valid_email,
    generate_reply_email,
    parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
    CannotCreateContactForReverseAlias,
    ErrContactErrorUpgradeNeeded,
@@ -51,14 +51,6 @@ def email_validator():
    return _check


def user_can_create_contacts(user: User) -> bool:
    if user.is_premium():
        return True
    if user.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
        return True
    return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS


def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
    """
    Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
@@ -82,7 +74,7 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
    if contact:
        raise ErrContactAlreadyExists(contact)

    if not user_can_create_contacts(user):
    if not user.can_create_contacts():
        raise ErrContactErrorUpgradeNeeded()

    contact = Contact.create(
@@ -90,7 +82,7 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
        alias_id=alias.id,
        website_email=contact_email,
        name=contact_name,
        reply_email=generate_reply_email(contact_email, user),
        reply_email=generate_reply_email(contact_email, alias),
    )

    LOG.d(
@@ -327,6 +319,6 @@ def alias_contact_manager(alias_id):
        last_page=last_page,
        query=query,
        nb_contact=nb_contact,
        can_create_contacts=user_can_create_contacts(current_user),
        can_create_contacts=current_user.can_create_contacts(),
        csrf_form=csrf_form,
    )
|
||||
|
@@ -1,9 +1,11 @@
from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
from app.dashboard.views.enter_sudo import sudo_required


@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
@sudo_required
def alias_export_route():
    return alias_export_csv(current_user)
|
||||
|
@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
|
||||
contact=contact,
|
||||
)
|
||||
logs.append(al)
|
||||
logs = sorted(logs, key=lambda l: l.when, reverse=True)
|
||||
logs = sorted(logs, key=lambda log: log.when, reverse=True)
|
||||
|
||||
return logs
|
||||
|
@ -7,79 +7,19 @@ from flask import render_template, redirect, url_for, flash, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import config
|
||||
from app.alias_utils import transfer_alias
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.db import Session
|
||||
from app.email_utils import send_email, render
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
Alias,
|
||||
Contact,
|
||||
AliasUsedOn,
|
||||
AliasMailbox,
|
||||
User,
|
||||
ClientUser,
|
||||
)
|
||||
from app.models import Mailbox
|
||||
from app.utils import CSRFValidationForm
|
||||
|
||||
|
||||
def transfer(alias, new_user, new_mailboxes: [Mailbox]):
|
||||
# cannot transfer alias which is used for receiving newsletter
|
||||
if User.get_by(newsletter_alias_id=alias.id):
|
||||
raise Exception("Cannot transfer alias that's used to receive newsletter")
|
||||
|
||||
# update user_id
|
||||
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
# remove existing mailboxes from the alias
|
||||
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
|
||||
|
||||
# set mailboxes
|
||||
alias.mailbox_id = new_mailboxes.pop().id
|
||||
for mb in new_mailboxes:
|
||||
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
|
||||
|
||||
# alias has never been transferred before
|
||||
if not alias.original_owner_id:
|
||||
alias.original_owner_id = alias.user_id
|
||||
|
||||
# inform previous owner
|
||||
old_user = alias.user
|
||||
send_email(
|
||||
old_user.email,
|
||||
f"Alias {alias.email} has been received",
|
||||
render(
|
||||
"transactional/alias-transferred.txt",
|
||||
alias=alias,
|
||||
),
|
||||
render(
|
||||
"transactional/alias-transferred.html",
|
||||
alias=alias,
|
||||
),
|
||||
)
|
||||
|
||||
# now the alias belongs to the new user
|
||||
alias.user_id = new_user.id
|
||||
|
||||
# set some fields back to default
|
||||
alias.disable_pgp = False
|
||||
alias.pinned = False
|
||||
|
||||
Session.commit()
|
||||
|
||||
|
||||
def hmac_alias_transfer_token(transfer_token: str) -> str:
|
||||
alias_hmac = hmac.new(
|
||||
config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
|
||||
@ -214,7 +154,13 @@ def alias_transfer_receive_route():
|
||||
mailboxes,
|
||||
token,
|
||||
)
|
||||
transfer(alias, current_user, mailboxes)
|
||||
transfer_alias(alias, current_user, mailboxes)
|
||||
|
||||
# reset transfer token
|
||||
alias.transfer_token = None
|
||||
alias.transfer_token_expiration = None
|
||||
Session.commit()
|
||||
|
||||
flash(f"You are now owner of {alias.email}", "success")
|
||||
return redirect(url_for("dashboard.index", highlight_alias_id=alias.id))
|
||||
|
||||
|
@@ -3,9 +3,11 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import config
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.models import ApiKey
from app.utils import CSRFValidationForm

@@ -14,9 +16,34 @@ class NewApiKeyForm(FlaskForm):
    name = StringField("Name", validators=[validators.DataRequired()])


def clean_up_unused_or_old_api_keys(user_id: int):
    total_keys = ApiKey.filter_by(user_id=user_id).count()
    if total_keys <= config.MAX_API_KEYS:
        return
    # Remove oldest unused
    for api_key in (
        ApiKey.filter_by(user_id=user_id, last_used=None)
        .order_by(ApiKey.created_at.asc())
        .all()
    ):
        Session.delete(api_key)
        total_keys -= 1
        if total_keys <= config.MAX_API_KEYS:
            return
    # Clean up oldest used
    for api_key in (
        ApiKey.filter_by(user_id=user_id).order_by(ApiKey.last_used.asc()).all()
    ):
        Session.delete(api_key)
        total_keys -= 1
        if total_keys <= config.MAX_API_KEYS:
            return


@dashboard_bp.route("/api_key", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/hour")
def api_key():
    api_keys = (
        ApiKey.filter(ApiKey.user_id == current_user.id)
@@ -50,6 +77,7 @@ def api_key():

        elif request.form.get("form-name") == "create":
            if new_api_key_form.validate():
                clean_up_unused_or_old_api_keys(current_user.id)
                new_api_key = ApiKey.create(
                    name=new_api_key_form.name.data, user_id=current_user.id
                )
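# Hedged usage sketch, mirroring the "create" branch above: prune before minting a
# new key so the per-user total stays within config.MAX_API_KEYS. The key name
# "cli" is made up; ApiKey, Session and current_user are the app's own objects.
clean_up_unused_or_old_api_keys(current_user.id)
api_key = ApiKey.create(name="cli", user_id=current_user.id)
Session.commit()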
|
||||
|
@ -1,14 +1,9 @@
|
||||
from app.db import Session
|
||||
|
||||
"""
|
||||
List of apps that user has used via the "Sign in with SimpleLogin"
|
||||
"""
|
||||
|
||||
from flask import render_template, request, flash, redirect
|
||||
from flask_login import login_required, current_user
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.models import (
|
||||
ClientUser,
|
||||
)
|
||||
@ -17,6 +12,10 @@ from app.models import (
|
||||
@dashboard_bp.route("/app", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def app_route():
|
||||
"""
|
||||
List of apps that user has used via the "Sign in with SimpleLogin"
|
||||
"""
|
||||
|
||||
client_users = (
|
||||
ClientUser.filter_by(user_id=current_user.id)
|
||||
.options(joinedload(ClientUser.client))
|
||||
|
@ -5,6 +5,7 @@ from flask_login import login_required, current_user
|
||||
from app import s3
|
||||
from app.config import JOB_BATCH_IMPORT
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import File, BatchImport, Job
|
||||
@ -13,6 +14,7 @@ from app.utils import random_string, CSRFValidationForm
|
||||
|
||||
@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
def batch_import_route():
|
||||
# only for users who have custom domains
|
||||
if not current_user.verified_custom_domains():
|
||||
|
@ -68,9 +68,14 @@ def coupon_route():
|
||||
)
|
||||
return redirect(request.url)
|
||||
|
||||
coupon.used_by_user_id = current_user.id
|
||||
coupon.used = True
|
||||
Session.commit()
|
||||
updated = (
|
||||
Session.query(Coupon)
|
||||
.filter_by(code=code, used=False)
|
||||
.update({"used_by_user_id": current_user.id, "used": True})
|
||||
)
|
||||
if updated != 1:
|
||||
flash("Coupon is not valid", "error")
|
||||
return redirect(request.url)
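# Design note, as a minimal sketch with the same assumed models: the filtered UPDATE
# above behaves like a compare-and-set, so two requests racing on the same code cannot
# both redeem it -- only the request whose UPDATE matched the still-unused row wins.
claimed = (
    Session.query(Coupon)
    .filter_by(code=code, used=False)
    .update({"used_by_user_id": current_user.id, "used": True})
) == 1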
|
||||
|
||||
manual_sub: ManualSubscription = ManualSubscription.get_by(
|
||||
user_id=current_user.id
|
||||
@ -95,7 +100,7 @@ def coupon_route():
|
||||
commit=True,
|
||||
)
|
||||
flash(
|
||||
f"Your account has been upgraded to Premium, thanks for your support!",
|
||||
"Your account has been upgraded to Premium, thanks for your support!",
|
||||
"success",
|
||||
)
|
||||
|
||||
|
@ -24,6 +24,7 @@ from app.models import (
|
||||
AliasMailbox,
|
||||
DomainDeletedAlias,
|
||||
)
|
||||
from app.utils import CSRFValidationForm
|
||||
|
||||
|
||||
@dashboard_bp.route("/custom_alias", methods=["GET", "POST"])
|
||||
@ -48,9 +49,13 @@ def custom_alias():
|
||||
at_least_a_premium_domain = True
|
||||
break
|
||||
|
||||
csrf_form = CSRFValidationForm()
|
||||
mailboxes = current_user.mailboxes()
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
alias_prefix = request.form.get("prefix").strip().lower().replace(" ", "")
|
||||
signed_alias_suffix = request.form.get("signed-alias-suffix")
|
||||
mailbox_ids = request.form.getlist("mailboxes")
|
||||
@ -120,18 +125,11 @@ def custom_alias():
|
||||
email=full_alias
|
||||
)
|
||||
custom_domain = domain_deleted_alias.domain
|
||||
if domain_deleted_alias.user_id == current_user.id:
|
||||
flash(
|
||||
f"You have deleted this alias before. You can restore it on "
|
||||
f"{custom_domain.domain} 'Deleted Alias' page",
|
||||
"error",
|
||||
)
|
||||
else:
|
||||
# should never happen as user can only choose their domains
|
||||
LOG.e(
|
||||
"Deleted Alias %s does not belong to user %s",
|
||||
domain_deleted_alias,
|
||||
)
|
||||
flash(
|
||||
f"You have deleted this alias before. You can restore it on "
|
||||
f"{custom_domain.domain} 'Deleted Alias' page",
|
||||
"error",
|
||||
)
|
||||
|
||||
elif DeletedAlias.get_by(email=full_alias):
|
||||
flash(general_error_msg, "error")
|
||||
@ -171,4 +169,5 @@ def custom_alias():
|
||||
alias_suffixes=alias_suffixes,
|
||||
at_least_a_premium_domain=at_least_a_premium_domain,
|
||||
mailboxes=mailboxes,
|
||||
csrf_form=csrf_form,
|
||||
)
|
||||
|
@ -67,7 +67,7 @@ def directory():
|
||||
if request.method == "POST":
|
||||
if request.form.get("form-name") == "delete":
|
||||
if not delete_dir_form.validate():
|
||||
flash(f"Invalid request", "warning")
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_obj = Directory.get(delete_dir_form.directory_id.data)
|
||||
|
||||
@ -87,7 +87,7 @@ def directory():
|
||||
|
||||
if request.form.get("form-name") == "toggle-directory":
|
||||
if not toggle_dir_form.validate():
|
||||
flash(f"Invalid request", "warning")
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = toggle_dir_form.directory_id.data
|
||||
dir_obj = Directory.get(dir_id)
|
||||
@ -109,7 +109,7 @@ def directory():
|
||||
|
||||
elif request.form.get("form-name") == "update":
|
||||
if not update_dir_form.validate():
|
||||
flash(f"Invalid request", "warning")
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = update_dir_form.directory_id.data
|
||||
dir_obj = Directory.get(dir_id)
|
||||
|
@ -8,6 +8,7 @@ from wtforms import PasswordField, validators
|
||||
|
||||
from app.config import CONNECT_WITH_PROTON
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import PartnerUser
|
||||
from app.proton.utils import get_proton_partner
|
||||
@ -21,6 +22,7 @@ class LoginForm(FlaskForm):
|
||||
|
||||
|
||||
@dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
|
||||
@limiter.limit("3/minute")
|
||||
@login_required
|
||||
def enter_sudo():
|
||||
password_check_form = LoginForm()
|
||||
|
@ -52,12 +52,13 @@ def get_stats(user: User) -> Stats:
|
||||
|
||||
|
||||
@dashboard_bp.route("/", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@limiter.limit(
|
||||
ALIAS_LIMIT,
|
||||
methods=["POST"],
|
||||
exempt_when=lambda: request.form.get("form-name") != "create-random-email",
|
||||
)
|
||||
@login_required
|
||||
@limiter.limit("10/minute", methods=["GET"], key_func=lambda: current_user.id)
|
||||
@parallel_limiter.lock(
|
||||
name="alias_creation",
|
||||
only_when=lambda: request.form.get("form-name") == "create-random-email",
|
||||
|
@ -1,3 +1,7 @@
|
||||
import base64
|
||||
import binascii
|
||||
import json
|
||||
|
||||
import arrow
|
||||
from flask import render_template, request, redirect, url_for, flash
|
||||
from flask_login import login_required, current_user
|
||||
@ -15,8 +19,8 @@ from app.email_utils import (
|
||||
mailbox_already_used,
|
||||
render,
|
||||
send_email,
|
||||
is_valid_email,
|
||||
)
|
||||
from app.email_validation import is_valid_email
|
||||
from app.log import LOG
|
||||
from app.models import Mailbox, Job
|
||||
from app.utils import CSRFValidationForm
|
||||
@ -180,7 +184,9 @@ def mailbox_route():
|
||||
|
||||
def send_verification_email(user, mailbox):
|
||||
s = TimestampSigner(MAILBOX_SECRET)
|
||||
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
|
||||
encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
|
||||
b64_data = base64.urlsafe_b64encode(encoded_data)
|
||||
mailbox_id_signed = s.sign(b64_data).decode()
|
||||
verification_url = (
|
||||
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
|
||||
)
|
||||
@ -205,22 +211,34 @@ def send_verification_email(user, mailbox):
|
||||
@dashboard_bp.route("/mailbox_verify")
|
||||
def mailbox_verify():
|
||||
s = TimestampSigner(MAILBOX_SECRET)
|
||||
mailbox_id = request.args.get("mailbox_id")
|
||||
|
||||
mailbox_verify_request = request.args.get("mailbox_id")
|
||||
try:
|
||||
r_id = int(s.unsign(mailbox_id, max_age=900))
|
||||
mailbox_raw_data = s.unsign(mailbox_verify_request, max_age=900)
|
||||
except Exception:
|
||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
else:
|
||||
mailbox = Mailbox.get(r_id)
|
||||
if not mailbox:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
try:
|
||||
decoded_data = base64.urlsafe_b64decode(mailbox_raw_data)
|
||||
except binascii.Error:
|
||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
mailbox_data = json.loads(decoded_data)
|
||||
if not isinstance(mailbox_data, list) or len(mailbox_data) != 2:
|
||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
mailbox_id = mailbox_data[0]
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
mailbox_email = mailbox_data[1]
|
||||
if mailbox_email != mailbox.email:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
mailbox.verified = True
|
||||
Session.commit()
|
||||
mailbox.verified = True
|
||||
Session.commit()
|
||||
|
||||
LOG.d("Mailbox %s is verified", mailbox)
|
||||
LOG.d("Mailbox %s is verified", mailbox)
|
||||
|
||||
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
|
||||
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
|
||||
|
@ -30,7 +30,7 @@ class ChangeEmailForm(FlaskForm):
|
||||
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def mailbox_detail_route(mailbox_id):
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
mailbox: Mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox or mailbox.user_id != current_user.id:
|
||||
flash("You cannot see this page", "warning")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
@ -144,6 +144,15 @@ def mailbox_detail_route(mailbox_id):
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
if mailbox.is_proton():
|
||||
flash(
|
||||
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
|
||||
"info",
|
||||
)
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
mailbox.pgp_public_key = request.form.get("pgp")
|
||||
try:
|
||||
mailbox.pgp_finger_print = load_public_key_and_check(
|
||||
@ -182,25 +191,16 @@ def mailbox_detail_route(mailbox_id):
|
||||
)
|
||||
elif request.form.get("form-name") == "generic-subject":
|
||||
if request.form.get("action") == "save":
|
||||
if not mailbox.pgp_enabled():
|
||||
flash(
|
||||
"Generic subject can only be used on PGP-enabled mailbox",
|
||||
"error",
|
||||
)
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
mailbox.generic_subject = request.form.get("generic-subject")
|
||||
Session.commit()
|
||||
flash("Generic subject for PGP-encrypted email is enabled", "success")
|
||||
flash("Generic subject is enabled", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
elif request.form.get("action") == "remove":
|
||||
mailbox.generic_subject = None
|
||||
Session.commit()
|
||||
flash("Generic subject for PGP-encrypted email is disabled", "success")
|
||||
flash("Generic subject is disabled", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
@ -80,8 +80,9 @@ def pricing():
|
||||
@dashboard_bp.route("/subscription_success")
|
||||
@login_required
|
||||
def subscription_success():
|
||||
flash("Thanks so much for supporting SimpleLogin!", "success")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
return render_template(
|
||||
"dashboard/thank-you.html",
|
||||
)
|
||||
|
||||
|
||||
@dashboard_bp.route("/coinbase_checkout")
|
||||
|
@ -13,34 +13,24 @@ from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from flask_wtf.file import FileField
|
||||
from wtforms import StringField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
|
||||
from app import s3, email_utils
|
||||
from app import s3
|
||||
from app.config import (
|
||||
URL,
|
||||
FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
CONNECT_WITH_PROTON,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
personal_email_already_used,
|
||||
)
|
||||
from app.errors import ProtonPartnerNotSetUp
|
||||
from app.extensions import limiter
|
||||
from app.image_validation import detect_image_format, ImageFormat
|
||||
from app.jobs.export_user_data_job import ExportUserDataJob
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
BlockBehaviourEnum,
|
||||
PlanEnum,
|
||||
File,
|
||||
ResetPasswordCode,
|
||||
EmailChange,
|
||||
User,
|
||||
Alias,
|
||||
CustomDomain,
|
||||
AliasGeneratorEnum,
|
||||
AliasSuffixEnum,
|
||||
@ -53,11 +43,10 @@ from app.models import (
|
||||
PartnerSubscription,
|
||||
UnsubscribeBehaviourEnum,
|
||||
)
|
||||
from app.proton.utils import get_proton_partner, perform_proton_account_unlink
|
||||
from app.proton.utils import get_proton_partner
|
||||
from app.utils import (
|
||||
random_string,
|
||||
CSRFValidationForm,
|
||||
canonicalize_email,
|
||||
)
|
||||
|
||||
|
||||
@ -66,12 +55,6 @@ class SettingForm(FlaskForm):
|
||||
profile_picture = FileField("Profile Picture")
|
||||
|
||||
|
||||
class ChangeEmailForm(FlaskForm):
|
||||
email = EmailField(
|
||||
"email", validators=[validators.DataRequired(), validators.Email()]
|
||||
)
|
||||
|
||||
|
||||
class PromoCodeForm(FlaskForm):
|
||||
code = StringField("Name", validators=[validators.DataRequired()])
|
||||
|
||||
@ -109,7 +92,6 @@ def get_partner_subscription_and_name(
|
||||
def setting():
|
||||
form = SettingForm()
|
||||
promo_form = PromoCodeForm()
|
||||
change_email_form = ChangeEmailForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
@ -122,64 +104,7 @@ def setting():
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
if request.form.get("form-name") == "update-email":
|
||||
if change_email_form.validate():
|
||||
# whether user can proceed with the email update
|
||||
new_email_valid = True
|
||||
new_email = canonicalize_email(change_email_form.email.data)
|
||||
if new_email != current_user.email and not pending_email:
|
||||
|
||||
# check if this email is not already used
|
||||
if personal_email_already_used(new_email) or Alias.get_by(
|
||||
email=new_email
|
||||
):
|
||||
flash(f"Email {new_email} already used", "error")
|
||||
new_email_valid = False
|
||||
elif not email_can_be_used_as_mailbox(new_email):
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
# a pending email change with the same email exists from another user
|
||||
elif EmailChange.get_by(new_email=new_email):
|
||||
other_email_change: EmailChange = EmailChange.get_by(
|
||||
new_email=new_email
|
||||
)
|
||||
LOG.w(
|
||||
"Another user has a pending %s with the same email address. Current user:%s",
|
||||
other_email_change,
|
||||
current_user,
|
||||
)
|
||||
|
||||
if other_email_change.is_expired():
|
||||
LOG.d(
|
||||
"delete the expired email change %s", other_email_change
|
||||
)
|
||||
EmailChange.delete(other_email_change.id)
|
||||
Session.commit()
|
||||
else:
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
|
||||
if new_email_valid:
|
||||
email_change = EmailChange.create(
|
||||
user_id=current_user.id,
|
||||
code=random_string(
|
||||
60
|
||||
), # todo: make sure the code is unique
|
||||
new_email=new_email,
|
||||
)
|
||||
Session.commit()
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash(
|
||||
"A confirmation email is on the way, please check your inbox",
|
||||
"success",
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
if request.form.get("form-name") == "update-profile":
|
||||
if form.validate():
|
||||
profile_updated = False
|
||||
@ -198,6 +123,16 @@ def setting():
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
if current_user.profile_picture_id is not None:
|
||||
current_profile_file = File.get_by(
|
||||
id=current_user.profile_picture_id
|
||||
)
|
||||
if (
|
||||
current_profile_file is not None
|
||||
and current_profile_file.user_id == current_user.id
|
||||
):
|
||||
s3.delete(current_profile_file.path)
|
||||
|
||||
file_path = random_string(30)
|
||||
file = File.create(user_id=current_user.id, path=file_path)
|
||||
|
||||
@ -213,15 +148,6 @@ def setting():
|
||||
if profile_updated:
|
||||
flash("Your profile has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "change-password":
|
||||
flash(
|
||||
"You are going to receive an email containing instructions to change your password",
|
||||
"success",
|
||||
)
|
||||
send_reset_password_email(current_user)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "notification-preference":
|
||||
choose = request.form.get("notification")
|
||||
if choose == "on":
|
||||
@ -231,7 +157,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your notification preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "change-alias-generator":
|
||||
scheme = int(request.form.get("alias-generator-scheme"))
|
||||
if AliasGeneratorEnum.has_value(scheme):
|
||||
@ -239,7 +164,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "change-random-alias-default-domain":
|
||||
default_domain = request.form.get("random-alias-default-domain")
|
||||
|
||||
@ -278,7 +202,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "random-alias-suffix":
|
||||
scheme = int(request.form.get("random-alias-suffix-generator"))
|
||||
if AliasSuffixEnum.has_value(scheme):
|
||||
@ -286,7 +209,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "change-sender-format":
|
||||
sender_format = int(request.form.get("sender-format"))
|
||||
if SenderFormatEnum.has_value(sender_format):
|
||||
@ -296,7 +218,6 @@ def setting():
|
||||
flash("Your sender format preference has been updated", "success")
|
||||
Session.commit()
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "replace-ra":
|
||||
choose = request.form.get("replace-ra")
|
||||
if choose == "on":
|
||||
@ -306,7 +227,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "sender-in-ra":
|
||||
choose = request.form.get("enable")
|
||||
if choose == "on":
|
||||
@ -316,7 +236,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
elif request.form.get("form-name") == "expand-alias-info":
|
||||
choose = request.form.get("enable")
|
||||
if choose == "on":
|
||||
@ -378,14 +297,6 @@ def setting():
|
||||
Session.commit()
|
||||
flash("Your preference has been updated", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
elif request.form.get("form-name") == "send-full-user-report":
|
||||
if ExportUserDataJob(current_user).store_job_in_db():
|
||||
flash(
|
||||
"You will receive your SimpleLogin data via email shortly",
|
||||
"success",
|
||||
)
|
||||
else:
|
||||
flash("An export of your data is currently in progress", "error")
|
||||
|
||||
manual_sub = ManualSubscription.get_by(user_id=current_user.id)
|
||||
apple_sub = AppleSubscription.get_by(user_id=current_user.id)
|
||||
@ -408,7 +319,6 @@ def setting():
|
||||
SenderFormatEnum=SenderFormatEnum,
|
||||
BlockBehaviourEnum=BlockBehaviourEnum,
|
||||
promo_form=promo_form,
|
||||
change_email_form=change_email_form,
|
||||
pending_email=pending_email,
|
||||
AliasGeneratorEnum=AliasGeneratorEnum,
|
||||
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
|
||||
@ -423,76 +333,3 @@ def setting():
|
||||
connect_with_proton=CONNECT_WITH_PROTON,
|
||||
proton_linked_account=proton_linked_account,
|
||||
)
|
||||
|
||||
|
||||
def send_reset_password_email(user):
|
||||
"""
|
||||
generate a new ResetPasswordCode and send it over email to user
|
||||
"""
|
||||
# the activation code is valid for 1h
|
||||
reset_password_code = ResetPasswordCode.create(
|
||||
user_id=user.id, code=random_string(60)
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
|
||||
|
||||
email_utils.send_reset_password_email(user.email, reset_password_link)
|
||||
|
||||
|
||||
def send_change_email_confirmation(user: User, email_change: EmailChange):
|
||||
"""
|
||||
send confirmation email to the new email address
|
||||
"""
|
||||
|
||||
link = f"{URL}/auth/change_email?code={email_change.code}"
|
||||
|
||||
email_utils.send_change_email(email_change.new_email, user.email, link)
|
||||
|
||||
|
||||
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def resend_email_change():
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
# extend email change expiration
|
||||
email_change.expired = arrow.now().shift(hours=12)
|
||||
Session.commit()
|
||||
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash("A confirmation email is on the way, please check your inbox", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def cancel_email_change():
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
EmailChange.delete(email_change.id)
|
||||
Session.commit()
|
||||
flash("Your email change is cancelled", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
|
||||
@login_required
|
||||
def unlink_proton_account():
|
||||
csrf_form = CSRFValidationForm()
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
perform_proton_account_unlink(current_user)
|
||||
flash("Your Proton account has been unlinked", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
@ -75,12 +75,11 @@ def block_contact(contact_id):
|
||||
@dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
|
||||
@login_required
|
||||
def encoded_unsubscribe(encoded_request: str):
|
||||
|
||||
unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
|
||||
current_user, encoded_request
|
||||
)
|
||||
if not unsub_data:
|
||||
flash(f"Invalid unsubscribe request", "error")
|
||||
flash("Invalid unsubscribe request", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
if unsub_data.action == UnsubscribeAction.DisableAlias:
|
||||
alias = Alias.get(unsub_data.data)
|
||||
@ -97,14 +96,14 @@ def encoded_unsubscribe(encoded_request: str):
|
||||
)
|
||||
)
|
||||
if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
|
||||
flash(f"You've unsubscribed from the newsletter", "success")
|
||||
flash("You've unsubscribed from the newsletter", "success")
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.index",
|
||||
)
|
||||
)
|
||||
if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
|
||||
flash(f"The original unsubscribe request has been forwarded", "success")
|
||||
flash("The original unsubscribe request has been forwarded", "success")
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.index",
|
||||
|
@ -1 +1,3 @@
|
||||
from .views import index, new_client, client_detail
|
||||
|
||||
__all__ = ["index", "new_client", "client_detail"]
|
||||
|
@ -87,7 +87,7 @@ def client_detail(client_id):
|
||||
)
|
||||
|
||||
flash(
|
||||
f"Thanks for submitting, we are informed and will come back to you asap!",
|
||||
"Thanks for submitting, we are informed and will come back to you asap!",
|
||||
"success",
|
||||
)
|
||||
|
||||
|
@ -1 +1,3 @@
|
||||
from .views import index
|
||||
|
||||
__all__ = ["index"]
|
||||
|
@ -34,7 +34,7 @@ def get_cname_record(hostname) -> Optional[str]:
|
||||
|
||||
|
||||
def get_mx_domains(hostname) -> [(int, str)]:
    """return list of (priority, domain name).
    """return list of (priority, domain name) sorted by priority (lowest priority first)
    domain name ends with a "." at the end.
    """
    try:
@@ -50,7 +50,7 @@ def get_mx_domains(hostname) -> [(int, str)]:

        ret.append((int(parts[0]), parts[1]))

    return ret
    return sorted(ret, key=lambda prio_domain: prio_domain[0])
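# Tiny illustration of the new return value with made-up records: the lowest priority
# comes first, and ties keep their original order because sorted() is stable.
mx = [(20, "mx2.example.com."), (10, "mxa.example.com."), (10, "mxb.example.com.")]
assert sorted(mx, key=lambda prio_domain: prio_domain[0]) == [
    (10, "mxa.example.com."),
    (10, "mxb.example.com."),
    (20, "mx2.example.com."),
]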
|
||||
|
||||
|
||||
_include_spf = "include:"
|
||||
|
@ -20,6 +20,7 @@ X_SPAM_STATUS = "X-Spam-Status"
|
||||
LIST_UNSUBSCRIBE = "List-Unsubscribe"
|
||||
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
|
||||
RETURN_PATH = "Return-Path"
|
||||
AUTHENTICATION_RESULTS = "Authentication-Results"
|
||||
|
||||
# headers used to DKIM sign in order of preference
|
||||
DKIM_HEADERS = [
|
||||
@ -32,6 +33,7 @@ DKIM_HEADERS = [
|
||||
SL_DIRECTION = "X-SimpleLogin-Type"
|
||||
SL_EMAIL_LOG_ID = "X-SimpleLogin-EmailLog-ID"
|
||||
SL_ENVELOPE_FROM = "X-SimpleLogin-Envelope-From"
|
||||
SL_ORIGINAL_FROM = "X-SimpleLogin-Original-From"
|
||||
SL_ENVELOPE_TO = "X-SimpleLogin-Envelope-To"
|
||||
SL_CLIENT_IP = "X-SimpleLogin-Client-IP"
|
||||
|
||||
|
@ -60,4 +60,5 @@ E522 = (
|
||||
)
|
||||
E523 = "550 SL E523 Unknown error"
|
||||
E524 = "550 SL E524 Wrong use of reverse-alias"
|
||||
E525 = "550 SL E525 Alias loop"
|
||||
# endregion
|
||||
|
@ -54,6 +54,7 @@ from app.models import (
|
||||
IgnoreBounceSender,
|
||||
InvalidMailboxDomain,
|
||||
VerpType,
|
||||
available_sl_email,
|
||||
)
|
||||
from app.utils import (
|
||||
random_string,
|
||||
@ -92,7 +93,7 @@ def send_welcome_email(user):
|
||||
|
||||
send_email(
|
||||
comm_email,
|
||||
f"Welcome to SimpleLogin",
|
||||
"Welcome to SimpleLogin",
|
||||
render("com/welcome.txt", user=user, alias=alias),
|
||||
render("com/welcome.html", user=user, alias=alias),
|
||||
unsubscribe_link,
|
||||
@ -103,7 +104,7 @@ def send_welcome_email(user):
|
||||
def send_trial_end_soon_email(user):
|
||||
send_email(
|
||||
user.email,
|
||||
f"Your trial will end soon",
|
||||
"Your trial will end soon",
|
||||
render("transactional/trial-end.txt.jinja2", user=user),
|
||||
render("transactional/trial-end.html", user=user),
|
||||
ignore_smtp_error=True,
|
||||
@ -113,7 +114,7 @@ def send_trial_end_soon_email(user):
|
||||
def send_activation_email(email, activation_link):
|
||||
send_email(
|
||||
email,
|
||||
f"Just one more step to join SimpleLogin",
|
||||
"Just one more step to join SimpleLogin",
|
||||
render(
|
||||
"transactional/activation.txt",
|
||||
activation_link=activation_link,
|
||||
@ -582,6 +583,26 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
|
||||
LOG.d("MX Domain %s %s is invalid mailbox domain", mx_domain, domain)
|
||||
return False
|
||||
|
||||
existing_user = User.get_by(email=email_address)
|
||||
if existing_user and existing_user.disabled:
|
||||
LOG.d(
|
||||
f"User {existing_user} is disabled. {email_address} cannot be used for other mailbox"
|
||||
)
|
||||
return False
|
||||
|
||||
for existing_user in (
|
||||
User.query()
|
||||
.join(Mailbox, User.id == Mailbox.user_id)
|
||||
.filter(Mailbox.email == email_address)
|
||||
.group_by(User.id)
|
||||
.all()
|
||||
):
|
||||
if existing_user.disabled:
|
||||
LOG.d(
|
||||
f"User {existing_user} is disabled and has a mailbox with {email_address}. Id cannot be used for other mailbox"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@ -767,7 +788,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
|
||||
ret = ""
|
||||
for to_decoded_str, charset in decode_header(header):
|
||||
if charset is None:
|
||||
if type(to_decoded_str) is bytes:
|
||||
if isinstance(to_decoded_str, bytes):
|
||||
decoded_str = to_decoded_str.decode()
|
||||
else:
|
||||
decoded_str = to_decoded_str
|
||||
@ -804,13 +825,13 @@ def to_bytes(msg: Message):
|
||||
for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
|
||||
try:
|
||||
return msg.as_bytes(policy=generator_policy)
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)
|
||||
|
||||
msg_string = msg.as_string()
|
||||
try:
|
||||
return msg_string.encode()
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_string().encode() fails", exc_info=True)
|
||||
|
||||
return msg_string.encode(errors="replace")
|
||||
@ -827,19 +848,6 @@ def should_add_dkim_signature(domain: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def is_valid_email(email_address: str) -> bool:
|
||||
"""
|
||||
Used to check whether an email address is valid
|
||||
NOT run MX check.
|
||||
NOT allow unicode.
|
||||
"""
|
||||
try:
|
||||
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
|
||||
return True
|
||||
except EmailNotValidError:
|
||||
return False
|
||||
|
||||
|
||||
class EmailEncoding(enum.Enum):
|
||||
BASE64 = "base64"
|
||||
QUOTED = "quoted-printable"
|
||||
@ -918,7 +926,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
if content_type == "text/plain":
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
clone_msg = copy(msg)
|
||||
new_payload = f"""{text_header}
|
||||
------------------------------
|
||||
@ -928,7 +936,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
elif content_type == "text/html":
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
|
||||
-premailer-cellspacing: 0; margin: 0; padding: 0;">
|
||||
<tr>
|
||||
@ -950,6 +958,8 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
for part in msg.get_payload():
|
||||
if isinstance(part, Message):
|
||||
new_parts.append(add_header(part, text_header, html_header))
|
||||
elif isinstance(part, str):
|
||||
new_parts.append(MIMEText(part))
|
||||
else:
|
||||
new_parts.append(part)
|
||||
clone_msg = copy(msg)
|
||||
@ -958,7 +968,14 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
|
||||
elif content_type in ("multipart/mixed", "multipart/signed"):
|
||||
new_parts = []
|
||||
parts = list(msg.get_payload())
|
||||
payload = msg.get_payload()
|
||||
if isinstance(payload, str):
|
||||
# The message is badly formatted inject as new
|
||||
new_parts = [MIMEText(text_header, "plain"), MIMEText(payload, "plain")]
|
||||
clone_msg = copy(msg)
|
||||
clone_msg.set_payload(new_parts)
|
||||
return clone_msg
|
||||
parts = list(payload)
|
||||
LOG.d("only add header for the first part for %s", content_type)
|
||||
for ix, part in enumerate(parts):
|
||||
if ix == 0:
|
||||
@ -975,7 +992,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
|
||||
|
||||
def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
||||
if type(msg) is str:
|
||||
if isinstance(msg, str):
|
||||
msg = msg.replace(old, new)
|
||||
return msg
|
||||
|
||||
@ -998,7 +1015,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
||||
if content_type in ("text/plain", "text/html"):
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
if encoding == EmailEncoding.QUOTED:
|
||||
LOG.d("handle quoted-printable replace %s -> %s", old, new)
|
||||
# first decode the payload
|
||||
@ -1043,7 +1060,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
||||
return msg
|
||||
|
||||
|
||||
def generate_reply_email(contact_email: str, user: User) -> str:
|
||||
def generate_reply_email(contact_email: str, alias: Alias) -> str:
|
||||
"""
|
||||
generate a reply_email (aka reverse-alias), make sure it isn't used by any contact
|
||||
"""
|
||||
@ -1054,6 +1071,7 @@ def generate_reply_email(contact_email: str, user: User) -> str:
|
||||
|
||||
include_sender_in_reverse_alias = False
|
||||
|
||||
user = alias.user
|
||||
# user has set this option explicitly
|
||||
if user.include_sender_in_reverse_alias is not None:
|
||||
include_sender_in_reverse_alias = user.include_sender_in_reverse_alias
|
||||
@ -1068,6 +1086,12 @@ def generate_reply_email(contact_email: str, user: User) -> str:
|
||||
contact_email = contact_email.replace(".", "_")
|
||||
contact_email = convert_to_alphanumeric(contact_email)
|
||||
|
||||
reply_domain = config.EMAIL_DOMAIN
|
||||
alias_domain = get_email_domain_part(alias.email)
|
||||
sl_domain = SLDomain.get_by(domain=alias_domain)
|
||||
if sl_domain and sl_domain.use_as_reverse_alias:
|
||||
reply_domain = alias_domain
|
||||
|
||||
# not use while to avoid infinite loop
|
||||
for _ in range(1000):
|
||||
if include_sender_in_reverse_alias and contact_email:
|
||||
@ -1075,15 +1099,15 @@ def generate_reply_email(contact_email: str, user: User) -> str:
|
||||
reply_email = (
|
||||
# do not use the ra+ anymore
|
||||
# f"ra+{contact_email}+{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
f"{contact_email}_{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
f"{contact_email}_{random_string(random_length)}@{reply_domain}"
|
||||
)
|
||||
else:
|
||||
random_length = random.randint(20, 50)
|
||||
# do not use the ra+ anymore
|
||||
# reply_email = f"ra+{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
reply_email = f"{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
reply_email = f"{random_string(random_length)}@{reply_domain}"
|
||||
|
||||
if not Contact.get_by(reply_email=reply_email):
|
||||
if available_sl_email(reply_email):
|
||||
return reply_email
|
||||
|
||||
raise Exception("Cannot generate reply email")
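# Condensed sketch of the domain selection above (the SLDomain row is assumed): the
# reverse alias only uses the alias's own domain when that domain opted in via
# use_as_reverse_alias; every other alias falls back to config.EMAIL_DOMAIN.
alias_domain = get_email_domain_part(alias.email)
sl_domain = SLDomain.get_by(domain=alias_domain)
reply_domain = (
    alias_domain if sl_domain and sl_domain.use_as_reverse_alias else config.EMAIL_DOMAIN
)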
|
||||
@ -1099,26 +1123,6 @@ def is_reverse_alias(address: str) -> bool:
|
||||
)
|
||||
|
||||
|
||||
# allow also + and @ that are present in a reply address
|
||||
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
|
||||
|
||||
|
||||
def normalize_reply_email(reply_email: str) -> str:
|
||||
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
|
||||
if not reply_email.isascii():
|
||||
reply_email = convert_to_id(reply_email)
|
||||
|
||||
ret = []
|
||||
# drop all control characters like shift, separator, etc
|
||||
for c in reply_email:
|
||||
if c not in _ALLOWED_CHARS:
|
||||
ret.append("_")
|
||||
else:
|
||||
ret.append(c)
|
||||
|
||||
return "".join(ret)
|
||||
|
||||
|
||||
def should_disable(alias: Alias) -> (bool, str):
|
||||
"""
|
||||
Return whether an alias should be disabled and if yes, the reason why
|
||||
@ -1399,7 +1403,7 @@ def generate_verp_email(
|
||||
# Time is in minutes granularity and start counting on 2022-01-01 to reduce bytes to represent time
|
||||
data = [
|
||||
verp_type.value,
|
||||
object_id,
|
||||
object_id or 0,
|
||||
int((time.time() - VERP_TIME_START) / 60),
|
||||
]
|
||||
json_payload = json.dumps(data).encode("utf-8")
|
||||
|
38
app/app/email_validation.py
Normal file
@ -0,0 +1,38 @@
|
||||
from email_validator import (
|
||||
validate_email,
|
||||
EmailNotValidError,
|
||||
)
|
||||
|
||||
from app.utils import convert_to_id
|
||||
|
||||
# allow also + and @ that are present in a reply address
|
||||
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
|
||||
|
||||
|
||||
def is_valid_email(email_address: str) -> bool:
|
||||
"""
|
||||
Used to check whether an email address is valid
|
||||
NOT run MX check.
|
||||
NOT allow unicode.
|
||||
"""
|
||||
try:
|
||||
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
|
||||
return True
|
||||
except EmailNotValidError:
|
||||
return False
|
||||
|
||||
|
||||
def normalize_reply_email(reply_email: str) -> str:
|
||||
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
|
||||
if not reply_email.isascii():
|
||||
reply_email = convert_to_id(reply_email)
|
||||
|
||||
ret = []
|
||||
# drop all control characters like shift, separator, etc
|
||||
for c in reply_email:
|
||||
if c not in _ALLOWED_CHARS:
|
||||
ret.append("_")
|
||||
else:
|
||||
ret.append(c)
|
||||
|
||||
return "".join(ret)
|
@ -71,7 +71,7 @@ class ErrContactErrorUpgradeNeeded(SLException):
|
||||
"""raised when user cannot create a contact because the plan doesn't allow it"""
|
||||
|
||||
def error_for_user(self) -> str:
|
||||
return f"Please upgrade to premium to create reverse-alias"
|
||||
return "Please upgrade to premium to create reverse-alias"
|
||||
|
||||
|
||||
class ErrAddressInvalid(SLException):
|
||||
@ -84,6 +84,14 @@ class ErrAddressInvalid(SLException):
|
||||
return f"{self.address} is not a valid email address"
|
||||
|
||||
|
||||
class InvalidContactEmailError(SLException):
|
||||
def __init__(self, website_email: str): # noqa: F821
|
||||
self.website_email = website_email
|
||||
|
||||
def error_for_user(self) -> str:
|
||||
return f"Cannot create contact with invalid email {self.website_email}"
|
||||
|
||||
|
||||
class ErrContactAlreadyExists(SLException):
|
||||
"""raised when a contact already exists"""
|
||||
|
||||
@ -108,3 +116,15 @@ class AccountAlreadyLinkedToAnotherPartnerException(LinkException):
|
||||
class AccountAlreadyLinkedToAnotherUserException(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__("This account is linked to another user")
|
||||
|
||||
|
||||
class AccountIsUsingAliasAsEmail(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__("Your account has an alias as it's email address")
|
||||
|
||||
|
||||
class ProtonAccountNotVerified(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
"The Proton account you are trying to use has not been verified"
|
||||
)
|
||||
|
@ -9,6 +9,7 @@ class LoginEvent:
|
||||
failed = 1
|
||||
disabled_login = 2
|
||||
not_activated = 3
|
||||
scheduled_to_be_deleted = 4
|
||||
|
||||
class Source(EnumE):
|
||||
web = 0
|
||||
|
@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(
|
||||
|
||||
from_header = get_header_unicode(msg[headers.FROM])
|
||||
|
||||
warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
More info on https://simplelogin.io/docs/getting-started/anti-phishing/
|
||||
"""
|
||||
warning_html = f"""
|
||||
warning_html = """
|
||||
<p style="color:red">
|
||||
This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
|
||||
@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
|
||||
refused_email = RefusedEmail.create(
|
||||
full_report_path=s3_report_path, user_id=alias.user_id, flush=True
|
||||
)
|
||||
return EmailLog.create(
|
||||
email_log = EmailLog.create(
|
||||
user_id=alias.user_id,
|
||||
mailbox_id=alias.mailbox_id,
|
||||
contact_id=contact.id,
|
||||
@ -142,6 +142,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
|
||||
blocked=True,
|
||||
commit=True,
|
||||
)
|
||||
return email_log
|
||||
|
||||
|
||||
def apply_dmarc_policy_for_reply_phase(
|
||||
|
@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
|
||||
return True
|
||||
|
||||
if is_deleted_alias(msg_info.sender_address):
|
||||
LOG.i(f"Complaint is for deleted alias. Do nothing")
|
||||
LOG.i("Complaint is for deleted alias. Do nothing")
|
||||
return True
|
||||
|
||||
contact = Contact.get_by(reply_email=msg_info.sender_address)
|
||||
@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
|
||||
alias = find_alias_with_address(msg_info.rcpt_address)
|
||||
|
||||
if is_deleted_alias(msg_info.rcpt_address):
|
||||
LOG.i(f"Complaint is for deleted alias. Do nothing")
|
||||
LOG.i("Complaint is for deleted alias. Do nothing")
|
||||
return True
|
||||
|
||||
if not alias:
|
||||
|
@ -54,9 +54,8 @@ class UnsubscribeEncoder:
|
||||
def encode_subject(
|
||||
cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
|
||||
) -> str:
|
||||
if (
|
||||
action != UnsubscribeAction.OriginalUnsubscribeMailto
|
||||
and type(data) is not int
|
||||
if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
|
||||
data, int
|
||||
):
|
||||
raise ValueError(f"Data has to be an int for an action of type {action}")
|
||||
if action == UnsubscribeAction.OriginalUnsubscribeMailto:
|
||||
@ -74,8 +73,8 @@ class UnsubscribeEncoder:
|
||||
)
|
||||
signed_data = cls._get_signer().sign(serialized_data).decode("utf-8")
|
||||
encoded_request = f"{UNSUB_PREFIX}.{signed_data}"
|
||||
if len(encoded_request) > 256:
|
||||
LOG.e("Encoded request is longer than 256 chars")
|
||||
if len(encoded_request) > 512:
|
||||
LOG.w("Encoded request is longer than 512 chars")
|
||||
return encoded_request
|
||||
|
||||
@staticmethod
|
||||
|
@ -1,7 +1,9 @@
|
||||
import urllib
|
||||
from email.header import Header
|
||||
from email.message import Message
|
||||
|
||||
from app.email import headers
|
||||
from app import config
|
||||
from app.email_utils import add_or_replace_header, delete_header
|
||||
from app.handler.unsubscribe_encoder import (
|
||||
UnsubscribeEncoder,
|
||||
@ -9,6 +11,7 @@ from app.handler.unsubscribe_encoder import (
|
||||
UnsubscribeData,
|
||||
UnsubscribeOriginalData,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import Alias, Contact, UnsubscribeBehaviourEnum
|
||||
|
||||
|
||||
@ -30,7 +33,10 @@ class UnsubscribeGenerator:
|
||||
"""
|
||||
unsubscribe_data = message[headers.LIST_UNSUBSCRIBE]
|
||||
if not unsubscribe_data:
|
||||
LOG.info("Email has no unsubscribe header")
|
||||
return message
|
||||
if isinstance(unsubscribe_data, Header):
|
||||
unsubscribe_data = str(unsubscribe_data.encode())
|
||||
raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
|
||||
mailto_unsubs = None
|
||||
other_unsubs = []
|
||||
@ -42,9 +48,16 @@ class UnsubscribeGenerator:
|
||||
method = raw_method[start + 1 : end]
|
||||
url_data = urllib.parse.urlparse(method)
|
||||
if url_data.scheme == "mailto":
|
||||
if url_data.path == config.UNSUBSCRIBER:
|
||||
LOG.debug(
|
||||
f"Skipping replacing unsubscribe since the original email already points to {config.UNSUBSCRIBER}"
|
||||
)
|
||||
return message
|
||||
query_data = urllib.parse.parse_qs(url_data.query)
|
||||
mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
|
||||
LOG.debug(f"Unsub is mailto to {mailto_unsubs}")
|
||||
else:
|
||||
LOG.debug(f"Unsub has {url_data.scheme} scheme")
|
||||
other_unsubs.append(method)
|
||||
# If there are non mailto unsubscribe methods, use those in the header
|
||||
if other_unsubs:
|
||||
@ -56,18 +69,19 @@ class UnsubscribeGenerator:
|
||||
add_or_replace_header(
|
||||
message, headers.LIST_UNSUBSCRIBE_POST, "List-Unsubscribe=One-Click"
|
||||
)
|
||||
LOG.debug(f"Adding click unsub methods to header {other_unsubs}")
|
||||
return message
|
||||
if not mailto_unsubs:
|
||||
message = delete_header(message, headers.LIST_UNSUBSCRIBE)
|
||||
message = delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
|
||||
elif not mailto_unsubs:
|
||||
LOG.debug("No unsubs. Deleting all unsub headers")
|
||||
delete_header(message, headers.LIST_UNSUBSCRIBE)
|
||||
delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
|
||||
return message
|
||||
return self._add_unsubscribe_header(
|
||||
message,
|
||||
UnsubscribeData(
|
||||
UnsubscribeAction.OriginalUnsubscribeMailto,
|
||||
UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
|
||||
),
|
||||
unsub_data = UnsubscribeData(
|
||||
UnsubscribeAction.OriginalUnsubscribeMailto,
|
||||
UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
|
||||
)
|
||||
LOG.debug(f"Adding unsub data {unsub_data}")
|
||||
return self._add_unsubscribe_header(message, unsub_data)
|
||||
|
||||
def _add_unsubscribe_header(
|
||||
self, message: Message, unsub: UnsubscribeData
|
||||
|
@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
|
||||
|
||||
LOG.d("Download file %s from %s", batch_import.file, file_url)
|
||||
r = requests.get(file_url)
|
||||
lines = [line.decode() for line in r.iter_lines()]
|
||||
lines = [line.decode("utf-8") for line in r.iter_lines()]
|
||||
|
||||
import_from_csv(batch_import, user, lines)
|
||||
|
||||
|
@ -1,2 +1,4 @@
|
||||
from .integrations import set_enable_proton_cookie
|
||||
from .exit_sudo import exit_sudo_mode
|
||||
|
||||
__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]
|
||||
|
@ -39,9 +39,8 @@ from app.models import (
|
||||
|
||||
|
||||
class ExportUserDataJob:
|
||||
|
||||
REMOVE_FIELDS = {
|
||||
"User": ("otp_secret",),
|
||||
"User": ("otp_secret", "password"),
|
||||
"Alias": ("ts_vector", "transfer_token", "hibp_last_check"),
|
||||
"CustomDomain": ("ownership_txt_token",),
|
||||
}
|
||||
|
@ -17,12 +17,11 @@ from attr import dataclass
|
||||
from app import config
|
||||
from app.email import headers
|
||||
from app.log import LOG
|
||||
from app.message_utils import message_to_bytes
|
||||
from app.message_utils import message_to_bytes, message_format_base64_parts
|
||||
|
||||
|
||||
@dataclass
|
||||
class SendRequest:
|
||||
|
||||
SAVE_EXTENSION = "sendrequest"
|
||||
|
||||
envelope_from: str
|
||||
@ -32,6 +31,7 @@ class SendRequest:
|
||||
rcpt_options: Dict = {}
|
||||
is_forward: bool = False
|
||||
ignore_smtp_errors: bool = False
|
||||
retries: int = 0
|
||||
|
||||
def to_bytes(self) -> bytes:
|
||||
if not config.SAVE_UNSENT_DIR:
|
||||
@ -45,6 +45,7 @@ class SendRequest:
|
||||
"mail_options": self.mail_options,
|
||||
"rcpt_options": self.rcpt_options,
|
||||
"is_forward": self.is_forward,
|
||||
"retries": self.retries,
|
||||
}
|
||||
return json.dumps(data).encode("utf-8")
|
||||
|
||||
@ -65,8 +66,33 @@ class SendRequest:
|
||||
mail_options=decoded_data["mail_options"],
|
||||
rcpt_options=decoded_data["rcpt_options"],
|
||||
is_forward=decoded_data["is_forward"],
|
||||
retries=decoded_data.get("retries", 1),
|
||||
)
|
||||
|
||||
def save_request_to_unsent_dir(self, prefix: str = "DeliveryFail"):
|
||||
file_name = (
|
||||
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
)
|
||||
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
|
||||
self.save_request_to_file(file_path)
|
||||
|
||||
|
||||
def save_request_to_failed_dir(self, prefix: str = "DeliveryRetryFail"):
|
||||
file_name = (
|
||||
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
)
|
||||
dir_name = os.path.join(config.SAVE_UNSENT_DIR, "failed")
|
||||
if not os.path.isdir(dir_name):
|
||||
os.makedirs(dir_name)
|
||||
file_path = os.path.join(dir_name, file_name)
|
||||
self.save_request_to_file(file_path)
|
||||
|
||||
def save_request_to_file(self, file_path: str):
|
||||
file_contents = self.to_bytes()
|
||||
with open(file_path, "wb") as fd:
|
||||
fd.write(file_contents)
|
||||
LOG.i(f"Saved unsent message {file_path}")
|
||||
|
||||
|
||||
class MailSender:
|
||||
def __init__(self):
|
||||
@ -170,17 +196,10 @@ class MailSender:
|
||||
LOG.e(
|
||||
f"Could not send message to smtp server {config.POSTFIX_SERVER}:{config.POSTFIX_PORT}"
|
||||
)
|
||||
self._save_request_to_unsent_dir(send_request)
|
||||
if config.SAVE_UNSENT_DIR:
|
||||
send_request.save_request_to_unsent_dir()
|
||||
return False
|
||||
|
||||
def _save_request_to_unsent_dir(self, send_request: SendRequest):
|
||||
file_name = f"DeliveryFail-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
|
||||
file_contents = send_request.to_bytes()
|
||||
with open(file_path, "wb") as fd:
|
||||
fd.write(file_contents)
|
||||
LOG.i(f"Saved unsent message {file_path}")
|
||||
|
||||
|
||||
mail_sender = MailSender()
|
||||
|
||||
@ -214,6 +233,7 @@ def load_unsent_mails_from_fs_and_resend():
|
||||
LOG.i(f"Trying to re-deliver email {filename}")
|
||||
try:
|
||||
send_request = SendRequest.load_from_file(full_file_path)
|
||||
send_request.retries += 1
|
||||
except Exception as e:
|
||||
LOG.e(f"Cannot load {filename}. Error {e}")
|
||||
continue
|
||||
@ -225,6 +245,11 @@ def load_unsent_mails_from_fs_and_resend():
|
||||
"DeliverUnsentEmail", {"delivered": "true"}
|
||||
)
|
||||
else:
|
||||
if send_request.retries > 2:
|
||||
os.unlink(full_file_path)
|
||||
send_request.save_request_to_failed_dir()
|
||||
else:
|
||||
send_request.save_request_to_file(full_file_path)
|
||||
newrelic.agent.record_custom_event(
|
||||
"DeliverUnsentEmail", {"delivered": "false"}
|
||||
)
|
||||
@ -256,7 +281,7 @@ def sl_sendmail(
|
||||
send_request = SendRequest(
|
||||
envelope_from,
|
||||
envelope_to,
|
||||
msg,
|
||||
message_format_base64_parts(msg),
|
||||
mail_options,
|
||||
rcpt_options,
|
||||
is_forward,
|
||||
|
@ -1,21 +1,42 @@
|
||||
import re
|
||||
from email import policy
|
||||
from email.message import Message
|
||||
|
||||
from app.email import headers
|
||||
from app.log import LOG
|
||||
|
||||
# SpamAssassin might flag as spam with a different line length
|
||||
BASE64_LINELENGTH = 76
|
||||
|
||||
|
||||
def message_to_bytes(msg: Message) -> bytes:
|
||||
"""replace Message.as_bytes() method by trying different policies"""
|
||||
for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
|
||||
try:
|
||||
return msg.as_bytes(policy=generator_policy)
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)
|
||||
|
||||
msg_string = msg.as_string()
|
||||
try:
|
||||
return msg_string.encode()
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_string().encode() fails", exc_info=True)
|
||||
|
||||
return msg_string.encode(errors="replace")
|
||||
|
||||
|
||||
def message_format_base64_parts(msg: Message) -> Message:
|
||||
for part in msg.walk():
|
||||
if part.get(
|
||||
headers.CONTENT_TRANSFER_ENCODING
|
||||
) == "base64" and part.get_content_type() in ("text/plain", "text/html"):
|
||||
# Remove line breaks
|
||||
body = re.sub("[\r\n]", "", part.get_payload())
|
||||
# Split into 76-character lines (BASE64_LINELENGTH)
|
||||
chunks = [
|
||||
body[i : i + BASE64_LINELENGTH]
|
||||
for i in range(0, len(body), BASE64_LINELENGTH)
|
||||
]
|
||||
part.set_payload("\r\n".join(chunks))
|
||||
return msg
|
||||
|
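For reference, a minimal sketch of what message_format_base64_parts above does to an over-long base64 body (standalone strings only; the sample payload is made up):

    import re

    BASE64_LINELENGTH = 76  # same constant as in message_utils.py

    body = "aGVsbG8gd29ybGQh" * 20            # one long base64 line
    body = re.sub("[\r\n]", "", body)          # drop any existing line breaks
    chunks = [
        body[i : i + BASE64_LINELENGTH]
        for i in range(0, len(body), BASE64_LINELENGTH)
    ]
    wrapped = "\r\n".join(chunks)              # every line is now at most 76 chars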
@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import dataclasses
|
||||
import enum
|
||||
import hashlib
|
||||
import hmac
|
||||
@ -18,7 +19,7 @@ from flanker.addresslib import address
|
||||
from flask import url_for
|
||||
from flask_login import UserMixin
|
||||
from jinja2 import FileSystemLoader, Environment
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy import orm, or_
|
||||
from sqlalchemy import text, desc, CheckConstraint, Index, Column
|
||||
from sqlalchemy.dialects.postgresql import TSVECTOR
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
@ -26,9 +27,11 @@ from sqlalchemy.orm import deferred
|
||||
from sqlalchemy.sql import and_
|
||||
from sqlalchemy_utils import ArrowType
|
||||
|
||||
from app import config
|
||||
from app import config, rate_limiter
|
||||
from app import s3
|
||||
from app.db import Session
|
||||
from app.dns_utils import get_mx_domains
|
||||
|
||||
from app.errors import (
|
||||
AliasInTrashError,
|
||||
DirectoryInTrashError,
|
||||
@ -44,7 +47,6 @@ from app.utils import (
|
||||
random_string,
|
||||
random_words,
|
||||
sanitize_email,
|
||||
random_word,
|
||||
)
|
||||
|
||||
Base = declarative_base()
|
||||
@ -233,6 +235,7 @@ class AuditLogActionEnum(EnumE):
|
||||
download_provider_complaint = 8
|
||||
disable_user = 9
|
||||
enable_user = 10
|
||||
stop_trial = 11
|
||||
|
||||
|
||||
class Phase(EnumE):
|
||||
@ -274,6 +277,13 @@ class IntEnumType(sa.types.TypeDecorator):
|
||||
return self._enum_type(enum_value)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class AliasOptions:
|
||||
show_sl_domains: bool = True
|
||||
show_partner_domains: Optional[Partner] = None
|
||||
show_partner_premium: Optional[bool] = None
|
||||
|
||||
|
||||
class Hibp(Base, ModelMixin):
|
||||
__tablename__ = "hibp"
|
||||
name = sa.Column(sa.String(), nullable=False, unique=True, index=True)
|
||||
@ -292,7 +302,9 @@ class HibpNotifiedAlias(Base, ModelMixin):
|
||||
"""
|
||||
|
||||
__tablename__ = "hibp_notified_alias"
|
||||
alias_id = sa.Column(sa.ForeignKey("alias.id", ondelete="cascade"), nullable=False)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey("alias.id", ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=False)
|
||||
|
||||
notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
|
||||
@ -333,7 +345,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
sa.Boolean, default=True, nullable=False, server_default="1"
|
||||
)
|
||||
|
||||
activated = sa.Column(sa.Boolean, default=False, nullable=False)
|
||||
activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
|
||||
|
||||
# an account can be disabled if having harmful behavior
|
||||
disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
|
||||
@ -403,7 +415,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
)
|
||||
|
||||
referral_id = sa.Column(
|
||||
sa.ForeignKey("referral.id", ondelete="SET NULL"), nullable=True, default=None
|
||||
sa.ForeignKey("referral.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
index=True,
|
||||
)
|
||||
|
||||
referral = orm.relationship("Referral", foreign_keys=[referral_id])
|
||||
@ -420,7 +435,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
# newsletter is sent to this address
|
||||
newsletter_alias_id = sa.Column(
|
||||
sa.ForeignKey("alias.id", ondelete="SET NULL"), nullable=True, default=None
|
||||
sa.ForeignKey("alias.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# whether to include the sender address in reverse-alias
|
||||
@ -434,7 +452,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
random_alias_suffix = sa.Column(
|
||||
sa.Integer,
|
||||
nullable=False,
|
||||
default=AliasSuffixEnum.random_string.value,
|
||||
default=AliasSuffixEnum.word.value,
|
||||
server_default=str(AliasSuffixEnum.random_string.value),
|
||||
)
|
||||
|
||||
@ -503,9 +521,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
server_default=BlockBehaviourEnum.return_2xx.name,
|
||||
)
|
||||
|
||||
# to keep existing behavior, the server default is TRUE whereas for new user, the default value is FALSE
|
||||
include_header_email_header = sa.Column(
|
||||
sa.Boolean, default=False, nullable=False, server_default="1"
|
||||
sa.Boolean, default=True, nullable=False, server_default="1"
|
||||
)
|
||||
|
||||
# bitwise flags. Allow for future expansion
|
||||
@ -519,11 +536,21 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
# Keep original unsub behaviour
|
||||
unsub_behaviour = sa.Column(
|
||||
IntEnumType(UnsubscribeBehaviourEnum),
|
||||
default=UnsubscribeBehaviourEnum.DisableAlias,
|
||||
default=UnsubscribeBehaviourEnum.PreserveOriginal,
|
||||
server_default=str(UnsubscribeBehaviourEnum.DisableAlias.value),
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
# Trigger hard deletion of the account at this time
|
||||
delete_on = sa.Column(ArrowType, default=None)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index(
|
||||
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
|
||||
),
|
||||
sa.Index("ix_users_delete_on", delete_on),
|
||||
)
|
||||
|
||||
@property
|
||||
def directory_quota(self):
|
||||
return min(
|
||||
@ -558,7 +585,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
@classmethod
|
||||
def create(cls, email, name="", password=None, from_partner=False, **kwargs):
|
||||
user: User = super(User, cls).create(email=email, name=name, **kwargs)
|
||||
email = sanitize_email(email)
|
||||
user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
|
||||
|
||||
if password:
|
||||
user.set_password(password)
|
||||
@ -569,19 +597,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
Session.flush()
|
||||
user.default_mailbox_id = mb.id
|
||||
|
||||
# create a first alias mail to show user how to use when they login
|
||||
alias = Alias.create_new(
|
||||
user,
|
||||
prefix="simplelogin-newsletter",
|
||||
mailbox_id=mb.id,
|
||||
note="This is your first alias. It's used to receive SimpleLogin communications "
|
||||
"like new features announcements, newsletters.",
|
||||
)
|
||||
Session.flush()
|
||||
|
||||
user.newsletter_alias_id = alias.id
|
||||
Session.flush()
|
||||
|
||||
# generate an alternative_id if needed
|
||||
if "alternative_id" not in kwargs:
|
||||
user.alternative_id = str(uuid.uuid4())
|
||||
@ -600,6 +615,19 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
Session.flush()
|
||||
return user
|
||||
|
||||
# create a first alias mail to show user how to use when they login
|
||||
alias = Alias.create_new(
|
||||
user,
|
||||
prefix="simplelogin-newsletter",
|
||||
mailbox_id=mb.id,
|
||||
note="This is your first alias. It's used to receive SimpleLogin communications "
|
||||
"like new features announcements, newsletters.",
|
||||
)
|
||||
Session.flush()
|
||||
|
||||
user.newsletter_alias_id = alias.id
|
||||
Session.flush()
|
||||
|
||||
if config.DISABLE_ONBOARDING:
|
||||
LOG.d("Disable onboarding emails")
|
||||
return user
|
||||
@ -625,7 +653,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
return user
|
||||
|
||||
def get_active_subscription(
|
||||
self,
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> Optional[
|
||||
Union[
|
||||
Subscription
|
||||
@ -653,19 +681,40 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
if coinbase_subscription and coinbase_subscription.is_active():
|
||||
return coinbase_subscription
|
||||
|
||||
partner_sub: PartnerSubscription = PartnerSubscription.find_by_user_id(self.id)
|
||||
if partner_sub and partner_sub.is_active():
|
||||
return partner_sub
|
||||
if include_partner_subscription:
|
||||
partner_sub: PartnerSubscription = PartnerSubscription.find_by_user_id(
|
||||
self.id
|
||||
)
|
||||
if partner_sub and partner_sub.is_active():
|
||||
return partner_sub
|
||||
|
||||
return None
|
||||
|
||||
def get_active_subscription_end(
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> Optional[arrow.Arrow]:
|
||||
sub = self.get_active_subscription(
|
||||
include_partner_subscription=include_partner_subscription
|
||||
)
|
||||
if isinstance(sub, Subscription):
|
||||
return arrow.get(sub.next_bill_date)
|
||||
if isinstance(sub, AppleSubscription):
|
||||
return sub.expires_date
|
||||
if isinstance(sub, ManualSubscription):
|
||||
return sub.end_at
|
||||
if isinstance(sub, CoinbaseSubscription):
|
||||
return sub.end_at
|
||||
return None
|
||||
|
||||
# region Billing
|
||||
def lifetime_or_active_subscription(self) -> bool:
|
||||
def lifetime_or_active_subscription(
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> bool:
|
||||
"""True if user has lifetime licence or active subscription"""
|
||||
if self.lifetime:
|
||||
return True
|
||||
|
||||
return self.get_active_subscription() is not None
|
||||
return self.get_active_subscription(include_partner_subscription) is not None
|
||||
|
||||
def is_paid(self) -> bool:
|
||||
"""same as _lifetime_or_active_subscription but not include free manual subscription"""
|
||||
@ -678,6 +727,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
return True
|
||||
|
||||
def is_active(self) -> bool:
|
||||
if self.delete_on is None:
|
||||
return True
|
||||
return self.delete_on < arrow.now()
|
||||
|
||||
def in_trial(self):
|
||||
"""return True if user does not have lifetime licence or an active subscription AND is in trial period"""
|
||||
if self.lifetime_or_active_subscription():
|
||||
@ -694,14 +748,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
return True
|
||||
|
||||
def is_premium(self) -> bool:
|
||||
def is_premium(self, include_partner_subscription: bool = True) -> bool:
|
||||
"""
|
||||
user is premium if they:
|
||||
- have a lifetime deal or
|
||||
- in trial period or
|
||||
- active subscription
|
||||
"""
|
||||
if self.lifetime_or_active_subscription():
|
||||
if self.lifetime_or_active_subscription(include_partner_subscription):
|
||||
return True
|
||||
|
||||
if self.trial_end and arrow.now() < self.trial_end:
|
||||
@ -779,6 +833,9 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
Whether user can create a new alias. User can't create a new alias if
|
||||
- has more than 15 aliases in the free plan, *even in the free trial*
|
||||
"""
|
||||
if not self.is_active():
|
||||
return False
|
||||
|
||||
if self.disabled:
|
||||
return False
|
||||
|
||||
@ -790,6 +847,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
< self.max_alias_for_free_account()
|
||||
)
|
||||
|
||||
def can_send_or_receive(self) -> bool:
|
||||
if self.disabled:
|
||||
LOG.i(f"User {self} is disabled. Cannot receive or send emails")
|
||||
return False
|
||||
if self.delete_on is not None:
|
||||
LOG.i(
|
||||
f"User {self} is scheduled to be deleted. Cannot receive or send emails"
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
def profile_picture_url(self):
|
||||
if self.profile_picture_id:
|
||||
return self.profile_picture.get_url()
|
||||
@ -848,7 +916,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
return sub
|
||||
|
||||
def verified_custom_domains(self) -> List["CustomDomain"]:
|
||||
return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all()
|
||||
return (
|
||||
CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
|
||||
.order_by(CustomDomain.domain.asc())
|
||||
.all()
|
||||
)
|
||||
|
||||
def mailboxes(self) -> List["Mailbox"]:
|
||||
"""list of mailbox that user own"""
|
||||
@ -868,14 +940,16 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
def custom_domains(self):
|
||||
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
|
||||
|
||||
def available_domains_for_random_alias(self) -> List[Tuple[bool, str]]:
|
||||
def available_domains_for_random_alias(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> List[Tuple[bool, str]]:
|
||||
"""Return available domains for user to create random aliases
|
||||
Each result record contains:
|
||||
- whether the domain belongs to SimpleLogin
|
||||
- the domain
|
||||
"""
|
||||
res = []
|
||||
for domain in self.available_sl_domains():
|
||||
for domain in self.available_sl_domains(alias_options=alias_options):
|
||||
res.append((True, domain))
|
||||
|
||||
for custom_domain in self.verified_custom_domains():
|
||||
@ -960,30 +1034,65 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
return None, "", False
|
||||
|
||||
def available_sl_domains(self) -> [str]:
|
||||
def available_sl_domains(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> [str]:
|
||||
"""
|
||||
Return all SimpleLogin domains that user can use when creating a new alias, including:
|
||||
- SimpleLogin public domains, available for all users (ALIAS_DOMAIN)
|
||||
- SimpleLogin premium domains, only available for Premium accounts (PREMIUM_ALIAS_DOMAIN)
|
||||
"""
|
||||
return [sl_domain.domain for sl_domain in self.get_sl_domains()]
|
||||
return [
|
||||
sl_domain.domain
|
||||
for sl_domain in self.get_sl_domains(alias_options=alias_options)
|
||||
]
|
||||
|
||||
def get_sl_domains(self) -> List["SLDomain"]:
|
||||
query = SLDomain.filter_by(hidden=False).order_by(SLDomain.order)
|
||||
def get_sl_domains(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> list["SLDomain"]:
|
||||
if alias_options is None:
|
||||
alias_options = AliasOptions()
|
||||
top_conds = [SLDomain.hidden == False] # noqa: E712
|
||||
or_conds = [] # noqa:E711
|
||||
if self.default_alias_public_domain_id is not None:
|
||||
default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
|
||||
if not self.is_premium():
|
||||
default_domain_conds.append(
|
||||
SLDomain.premium_only == False # noqa: E712
|
||||
)
|
||||
or_conds.append(and_(*default_domain_conds).self_group())
|
||||
if alias_options.show_partner_domains is not None:
|
||||
partner_user = PartnerUser.filter_by(
|
||||
user_id=self.id, partner_id=alias_options.show_partner_domains.id
|
||||
).first()
|
||||
if partner_user is not None:
|
||||
partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
|
||||
if alias_options.show_partner_premium is None:
|
||||
alias_options.show_partner_premium = self.is_premium()
|
||||
if not alias_options.show_partner_premium:
|
||||
partner_domain_cond.append(
|
||||
SLDomain.premium_only == False # noqa: E712
|
||||
)
|
||||
or_conds.append(and_(*partner_domain_cond).self_group())
|
||||
if alias_options.show_sl_domains:
|
||||
sl_conds = [SLDomain.partner_id == None] # noqa: E711
|
||||
if not self.is_premium():
|
||||
sl_conds.append(SLDomain.premium_only == False) # noqa: E712
|
||||
or_conds.append(and_(*sl_conds).self_group())
|
||||
top_conds.append(or_(*or_conds))
|
||||
query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
|
||||
return query.all()
|
||||
|
||||
if self.is_premium():
|
||||
return query.all()
|
||||
else:
|
||||
return query.filter_by(premium_only=False).all()
|
||||
|
||||
def available_alias_domains(self) -> [str]:
|
||||
def available_alias_domains(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> [str]:
|
||||
"""return all domains that user can use when creating a new alias, including:
|
||||
- SimpleLogin public domains, available for all users (ALIAS_DOMAIN)
|
||||
- SimpleLogin premium domains, only available for Premium accounts (PREMIUM_ALIAS_DOMAIN)
|
||||
- Verified custom domains
|
||||
|
||||
"""
|
||||
domains = self.available_sl_domains()
|
||||
domains = self.available_sl_domains(alias_options=alias_options)
|
||||
|
||||
for custom_domain in self.verified_custom_domains():
|
||||
domains.append(custom_domain.domain)
|
||||
@ -1001,16 +1110,28 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
> 0
|
||||
)
|
||||
|
||||
def get_random_alias_suffix(self):
|
||||
def get_random_alias_suffix(self, custom_domain: Optional["CustomDomain"] = None):
|
||||
"""Get random suffix for an alias based on user's preference.
|
||||
|
||||
Use a shorter suffix in case of custom domain
|
||||
|
||||
Returns:
|
||||
str: the random suffix generated
|
||||
"""
|
||||
if self.random_alias_suffix == AliasSuffixEnum.random_string.value:
|
||||
return random_string(config.ALIAS_RANDOM_SUFFIX_LENGTH, include_digits=True)
|
||||
return random_word()
|
||||
|
||||
if custom_domain is None:
|
||||
return random_words(1, 3)
|
||||
|
||||
return random_words(1)
|
||||
|
||||
def can_create_contacts(self) -> bool:
|
||||
if self.is_premium():
|
||||
return True
|
||||
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
|
||||
return True
|
||||
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
|
||||
|
||||
def __repr__(self):
|
||||
return f"<User {self.id} {self.name} {self.email}>"
|
||||
@ -1255,34 +1376,48 @@ class OauthToken(Base, ModelMixin):
|
||||
return self.expired < arrow.now()
|
||||
|
||||
|
||||
def generate_email(
|
||||
def available_sl_email(email: str) -> bool:
|
||||
if (
|
||||
Alias.get_by(email=email)
|
||||
or Contact.get_by(reply_email=email)
|
||||
or DeletedAlias.get_by(email=email)
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def generate_random_alias_email(
|
||||
scheme: int = AliasGeneratorEnum.word.value,
|
||||
in_hex: bool = False,
|
||||
alias_domain=config.FIRST_ALIAS_DOMAIN,
|
||||
alias_domain: str = config.FIRST_ALIAS_DOMAIN,
|
||||
retries: int = 10,
|
||||
) -> str:
|
||||
"""generate an email address that does not exist before
|
||||
:param alias_domain: the domain used to generate the alias.
|
||||
:param scheme: int, value of AliasGeneratorEnum, indicate how the email is generated
|
||||
:param retries: int, How many times we can try to generate an alias in case of collision
|
||||
:type in_hex: bool, if the generate scheme is uuid, is hex favorable?
|
||||
"""
|
||||
if retries <= 0:
|
||||
raise Exception("Cannot generate alias after many retries")
|
||||
if scheme == AliasGeneratorEnum.uuid.value:
|
||||
name = uuid.uuid4().hex if in_hex else uuid.uuid4().__str__()
|
||||
random_email = name + "@" + alias_domain
|
||||
else:
|
||||
random_email = random_words() + "@" + alias_domain
|
||||
random_email = random_words(2, 3) + "@" + alias_domain
|
||||
|
||||
random_email = random_email.lower().strip()
|
||||
|
||||
# check that the client does not exist yet
|
||||
if not Alias.get_by(email=random_email) and not DeletedAlias.get_by(
|
||||
email=random_email
|
||||
):
|
||||
if available_sl_email(random_email):
|
||||
LOG.d("generate email %s", random_email)
|
||||
return random_email
|
||||
|
||||
# Rerun the function
|
||||
LOG.w("email %s already exists, generate a new email", random_email)
|
||||
return generate_email(scheme=scheme, in_hex=in_hex)
|
||||
return generate_random_alias_email(
|
||||
scheme=scheme, in_hex=in_hex, retries=retries - 1
|
||||
)
|
||||
|
||||
|
||||
class Alias(Base, ModelMixin):
|
||||
@ -1364,7 +1499,7 @@ class Alias(Base, ModelMixin):
|
||||
)
|
||||
|
||||
# have I been pwned
|
||||
hibp_last_check = sa.Column(ArrowType, default=None)
|
||||
hibp_last_check = sa.Column(ArrowType, default=None, index=True)
|
||||
hibp_breaches = orm.relationship("Hibp", secondary="alias_hibp")
|
||||
|
||||
# to use Postgres full text search. Only applied on "note" column for now
|
||||
@ -1373,6 +1508,8 @@ class Alias(Base, ModelMixin):
|
||||
TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
|
||||
)
|
||||
|
||||
last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
|
||||
# index on note column using pg_trgm
|
||||
@ -1391,7 +1528,8 @@ class Alias(Base, ModelMixin):
|
||||
def mailboxes(self):
|
||||
ret = [self.mailbox]
|
||||
for m in self._mailboxes:
|
||||
ret.append(m)
|
||||
if m.id is not self.mailbox.id:
|
||||
ret.append(m)
|
||||
|
||||
ret = [mb for mb in ret if mb.verified]
|
||||
ret = sorted(ret, key=lambda mb: mb.email)
|
||||
@ -1440,6 +1578,15 @@ class Alias(Base, ModelMixin):
|
||||
flush = kw.pop("flush", False)
|
||||
|
||||
new_alias = cls(**kw)
|
||||
user = User.get(new_alias.user_id)
|
||||
if user.is_premium():
|
||||
limits = config.ALIAS_CREATE_RATE_LIMIT_PAID
|
||||
else:
|
||||
limits = config.ALIAS_CREATE_RATE_LIMIT_FREE
|
||||
# limits is array of (hits,days)
|
||||
for limit in limits:
|
||||
key = f"alias_create_{limit[1]}d:{user.id}"
|
||||
rate_limiter.check_bucket_limit(key, limit[0], limit[1])
|
||||
|
||||
email = kw["email"]
|
||||
# make sure email is lowercase and doesn't have any whitespace
|
||||
@ -1481,7 +1628,7 @@ class Alias(Base, ModelMixin):
|
||||
suffix = user.get_random_alias_suffix()
|
||||
email = f"{prefix}.{suffix}@{config.FIRST_ALIAS_DOMAIN}"
|
||||
|
||||
if not cls.get_by(email=email) and not DeletedAlias.get_by(email=email):
|
||||
if available_sl_email(email):
|
||||
break
|
||||
|
||||
return Alias.create(
|
||||
@ -1510,7 +1657,7 @@ class Alias(Base, ModelMixin):
|
||||
|
||||
if user.default_alias_custom_domain_id:
|
||||
custom_domain = CustomDomain.get(user.default_alias_custom_domain_id)
|
||||
random_email = generate_email(
|
||||
random_email = generate_random_alias_email(
|
||||
scheme=scheme, in_hex=in_hex, alias_domain=custom_domain.domain
|
||||
)
|
||||
elif user.default_alias_public_domain_id:
|
||||
@ -1518,12 +1665,12 @@ class Alias(Base, ModelMixin):
|
||||
if sl_domain.premium_only and not user.is_premium():
|
||||
LOG.w("%s not premium, cannot use %s", user, sl_domain)
|
||||
else:
|
||||
random_email = generate_email(
|
||||
random_email = generate_random_alias_email(
|
||||
scheme=scheme, in_hex=in_hex, alias_domain=sl_domain.domain
|
||||
)
|
||||
|
||||
if not random_email:
|
||||
random_email = generate_email(scheme=scheme, in_hex=in_hex)
|
||||
random_email = generate_random_alias_email(scheme=scheme, in_hex=in_hex)
|
||||
|
||||
alias = Alias.create(
|
||||
user_id=user.id,
|
||||
@ -1557,7 +1704,9 @@ class ClientUser(Base, ModelMixin):
|
||||
client_id = sa.Column(sa.ForeignKey(Client.id, ondelete="cascade"), nullable=False)
|
||||
|
||||
# Null means client has access to user original email
|
||||
alias_id = sa.Column(sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=True)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=True, index=True
|
||||
)
|
||||
|
||||
# user can decide to send to client another name
|
||||
name = sa.Column(
|
||||
@ -1676,7 +1825,7 @@ class Contact(Base, ModelMixin):
|
||||
is_cc = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
pgp_public_key = sa.Column(sa.Text, nullable=True)
|
||||
pgp_finger_print = sa.Column(sa.String(512), nullable=True)
|
||||
pgp_finger_print = sa.Column(sa.String(512), nullable=True, index=True)
|
||||
|
||||
alias = orm.relationship(Alias, backref="contacts")
|
||||
user = orm.relationship(User)
|
||||
@ -1830,6 +1979,7 @@ class Contact(Base, ModelMixin):
|
||||
|
||||
class EmailLog(Base, ModelMixin):
|
||||
__tablename__ = "email_log"
|
||||
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
|
||||
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
@ -1919,6 +2069,20 @@ class EmailLog(Base, ModelMixin):
|
||||
def get_dashboard_url(self):
|
||||
return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"
|
||||
|
||||
@classmethod
|
||||
def create(cls, *args, **kwargs):
|
||||
commit = kwargs.pop("commit", False)
|
||||
email_log = super().create(*args, **kwargs)
|
||||
Session.flush()
|
||||
if "alias_id" in kwargs:
|
||||
sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
|
||||
Session.execute(
|
||||
sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
|
||||
)
|
||||
if commit:
|
||||
Session.commit()
|
||||
return email_log
|
||||
|
||||
def __repr__(self):
|
||||
return f"<EmailLog {self.id}>"
|
||||
|
||||
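A short sketch of the EmailLog.create override added above (the ids and extra columns are illustrative assumptions; only alias_id and commit drive the new behaviour):

    from app.models import Alias, EmailLog

    alias = Alias.get(1234)                    # illustrative id
    email_log = EmailLog.create(
        user_id=alias.user_id,
        alias_id=alias.id,
        contact_id=5678,                       # assumed FK, as for any EmailLog row
        commit=True,                           # flush, update alias.last_email_log_id, then commit
    )
    assert Alias.get(alias.id).last_email_log_id == email_log.id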
@ -2087,7 +2251,9 @@ class AliasUsedOn(Base, ModelMixin):
|
||||
sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
|
||||
)
|
||||
|
||||
alias_id = sa.Column(sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
|
||||
|
||||
alias = orm.relationship(Alias)
|
||||
@ -2206,6 +2372,7 @@ class CustomDomain(Base, ModelMixin):
|
||||
@classmethod
|
||||
def create(cls, **kwargs):
|
||||
domain = kwargs.get("domain")
|
||||
kwargs["domain"] = domain.replace("\n", "")
|
||||
if DeletedSubdomain.get_by(domain=domain):
|
||||
raise SubdomainInTrashError
|
||||
|
||||
@ -2473,6 +2640,28 @@ class Mailbox(Base, ModelMixin):
|
||||
+ Alias.filter_by(mailbox_id=self.id).count()
|
||||
)
|
||||
|
||||
def is_proton(self) -> bool:
|
||||
if (
|
||||
self.email.endswith("@proton.me")
|
||||
or self.email.endswith("@protonmail.com")
|
||||
or self.email.endswith("@protonmail.ch")
|
||||
or self.email.endswith("@proton.ch")
|
||||
or self.email.endswith("@pm.me")
|
||||
):
|
||||
return True
|
||||
|
||||
from app.email_utils import get_email_local_part
|
||||
|
||||
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
|
||||
# Proton is the first domain
|
||||
if mx_domains and mx_domains[0][1] in (
|
||||
"mail.protonmail.ch.",
|
||||
"mailsec.protonmail.ch.",
|
||||
):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def delete(cls, obj_id):
|
||||
mailbox: Mailbox = cls.get(obj_id)
|
||||
@ -2505,6 +2694,12 @@ class Mailbox(Base, ModelMixin):
|
||||
|
||||
return ret
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
if "email" in kw:
|
||||
kw["email"] = sanitize_email(kw["email"])
|
||||
return super().create(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Mailbox {self.id} {self.email}>"
|
||||
|
||||
@ -2764,6 +2959,31 @@ class Notification(Base, ModelMixin):
|
||||
)
|
||||
|
||||
|
||||
class Partner(Base, ModelMixin):
|
||||
__tablename__ = "partner"
|
||||
|
||||
name = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
contact_email = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
|
||||
@staticmethod
|
||||
def find_by_token(token: str) -> Optional[Partner]:
|
||||
hmaced = PartnerApiToken.hmac_token(token)
|
||||
res = (
|
||||
Session.query(Partner, PartnerApiToken)
|
||||
.filter(
|
||||
and_(
|
||||
PartnerApiToken.token == hmaced,
|
||||
Partner.id == PartnerApiToken.partner_id,
|
||||
)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if res:
|
||||
partner, partner_api_token = res
|
||||
return partner
|
||||
return None
|
||||
|
||||
|
||||
class SLDomain(Base, ModelMixin):
|
||||
"""SimpleLogin domains"""
|
||||
|
||||
@ -2781,12 +3001,23 @@ class SLDomain(Base, ModelMixin):
|
||||
sa.Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
partner_id = sa.Column(
|
||||
sa.ForeignKey(Partner.id, ondelete="cascade"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
server_default="NULL",
|
||||
)
|
||||
|
||||
# if enabled, do not show this domain when user creates a custom alias
|
||||
hidden = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
# the order in which the domains are shown when user creates a custom alias
|
||||
order = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
|
||||
|
||||
use_as_reverse_alias = sa.Column(
|
||||
sa.Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"
|
||||
|
||||
@ -2807,6 +3038,8 @@ class Monitoring(Base, ModelMixin):
|
||||
active_queue = sa.Column(sa.Integer, nullable=False)
|
||||
deferred_queue = sa.Column(sa.Integer, nullable=False)
|
||||
|
||||
__table_args__ = (Index("ix_monitoring_created_at", "created_at"),)
|
||||
|
||||
|
||||
class BatchImport(Base, ModelMixin):
|
||||
__tablename__ = "batch_import"
|
||||
@ -2932,6 +3165,8 @@ class Bounce(Base, ModelMixin):
|
||||
email = sa.Column(sa.String(256), nullable=False, index=True)
|
||||
info = sa.Column(sa.Text, nullable=True)
|
||||
|
||||
__table_args__ = (sa.Index("ix_bounce_created_at", "created_at"),)
|
||||
|
||||
|
||||
class TransactionalEmail(Base, ModelMixin):
|
||||
"""Storing all email addresses that receive transactional emails, including account email and mailboxes.
|
||||
@ -2941,6 +3176,22 @@ class TransactionalEmail(Base, ModelMixin):
|
||||
__tablename__ = "transactional_email"
|
||||
email = sa.Column(sa.String(256), nullable=False, unique=False)
|
||||
|
||||
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
# whether to call Session.commit
|
||||
commit = kw.pop("commit", False)
|
||||
|
||||
r = cls(**kw)
|
||||
if not config.STORE_TRANSACTIONAL_EMAILS:
|
||||
return r
|
||||
|
||||
Session.add(r)
|
||||
if commit:
|
||||
Session.commit()
|
||||
return r
|
||||
|
||||
|
||||
class Payout(Base, ModelMixin):
|
||||
"""Referral payouts"""
|
||||
@ -2993,7 +3244,7 @@ class MessageIDMatching(Base, ModelMixin):
|
||||
|
||||
# to track what email_log that has created this matching
|
||||
email_log_id = sa.Column(
|
||||
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True
|
||||
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
|
||||
)
|
||||
|
||||
email_log = orm.relationship("EmailLog")
|
||||
@ -3131,6 +3382,15 @@ class AdminAuditLog(Base):
|
||||
},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def stop_trial(cls, admin_user_id: int, user_id: int):
|
||||
cls.create(
|
||||
admin_user_id=admin_user_id,
|
||||
action=AuditLogActionEnum.stop_trial.value,
|
||||
model="User",
|
||||
model_id=user_id,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def disable_otp_fido(
|
||||
cls, admin_user_id: int, user_id: int, had_otp: bool, had_fido: bool
|
||||
@ -3227,31 +3487,6 @@ class ProviderComplaint(Base, ModelMixin):
|
||||
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
||||
|
||||
|
||||
class Partner(Base, ModelMixin):
|
||||
__tablename__ = "partner"
|
||||
|
||||
name = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
contact_email = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
|
||||
@staticmethod
|
||||
def find_by_token(token: str) -> Optional[Partner]:
|
||||
hmaced = PartnerApiToken.hmac_token(token)
|
||||
res = (
|
||||
Session.query(Partner, PartnerApiToken)
|
||||
.filter(
|
||||
and_(
|
||||
PartnerApiToken.token == hmaced,
|
||||
Partner.id == PartnerApiToken.partner_id,
|
||||
)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if res:
|
||||
partner, partner_api_token = res
|
||||
return partner
|
||||
return None
|
||||
|
||||
|
||||
class PartnerApiToken(Base, ModelMixin):
|
||||
__tablename__ = "partner_api_token"
|
||||
|
||||
@ -3321,7 +3556,7 @@ class PartnerSubscription(Base, ModelMixin):
|
||||
)
|
||||
|
||||
# when the partner subscription ends
|
||||
end_at = sa.Column(ArrowType, nullable=False)
|
||||
end_at = sa.Column(ArrowType, nullable=False, index=True)
|
||||
|
||||
partner_user = orm.relationship(PartnerUser)
|
||||
|
||||
@ -3351,7 +3586,7 @@ class PartnerSubscription(Base, ModelMixin):
|
||||
|
||||
class Newsletter(Base, ModelMixin):
|
||||
__tablename__ = "newsletter"
|
||||
subject = sa.Column(sa.String(), nullable=False, unique=True, index=True)
|
||||
subject = sa.Column(sa.String(), nullable=False, index=True)
|
||||
|
||||
html = sa.Column(sa.Text)
|
||||
plain_text = sa.Column(sa.Text)
|
||||
|
@ -1 +1,3 @@
|
||||
from . import views
|
||||
|
||||
__all__ = ["views"]
|
||||
|
@ -1 +1,3 @@
|
||||
from .views import authorize, token, user_info
|
||||
|
||||
__all__ = ["authorize", "token", "user_info"]
|
||||
|
@ -140,7 +140,7 @@ def authorize():
|
||||
Scope=Scope,
|
||||
)
|
||||
else: # POST - user allows or denies
|
||||
if not current_user.is_authenticated or not current_user.is_active:
|
||||
if not current_user.is_authenticated or not current_user.is_active():
|
||||
LOG.i(
|
||||
"Attempt to validate a OAUth allow request by an unauthenticated user"
|
||||
)
|
||||
|
@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
|
||||
- the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
|
||||
"""
|
||||
res = set()
|
||||
if type(arg_input) is str:
|
||||
if isinstance(arg_input, str):
|
||||
if " " in arg_input:
|
||||
for x in arg_input.split(" "):
|
||||
if x:
|
||||
|
@ -5,3 +5,11 @@ from .views import (
|
||||
account_activated,
|
||||
extension_redirect,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"index",
|
||||
"final",
|
||||
"setup_done",
|
||||
"account_activated",
|
||||
"extension_redirect",
|
||||
]
|
||||
|
@ -39,7 +39,6 @@ class _InnerLock:
|
||||
lock_redis.storage.delete(lock_name)
|
||||
|
||||
def __call__(self, f: Callable[..., Any]):
|
||||
|
||||
if self.lock_suffix is None:
|
||||
lock_suffix = f.__name__
|
||||
else:
|
||||
|
@ -5,3 +5,11 @@ from .views import (
|
||||
provider1_callback,
|
||||
provider2_callback,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"index",
|
||||
"phone_reservation",
|
||||
"twilio_callback",
|
||||
"provider1_callback",
|
||||
"provider2_callback",
|
||||
]
|
||||
|
@ -7,11 +7,12 @@ from typing import Optional
|
||||
|
||||
from app.account_linking import SLPlan, SLPlanType
|
||||
from app.config import PROTON_EXTRA_HEADER_NAME, PROTON_EXTRA_HEADER_VALUE
|
||||
from app.errors import ProtonAccountNotVerified
|
||||
from app.log import LOG
|
||||
|
||||
_APP_VERSION = "OauthClient_1.0.0"
|
||||
|
||||
PROTON_ERROR_CODE_NOT_EXISTS = 2501
|
||||
PROTON_ERROR_CODE_HV_NEEDED = 9001
|
||||
|
||||
PLAN_FREE = 1
|
||||
PLAN_PREMIUM = 2
|
||||
@ -57,6 +58,15 @@ def convert_access_token(access_token_response: str) -> AccessCredentials:
|
||||
)
|
||||
|
||||
|
||||
def handle_response_not_ok(status: int, body: dict, text: str) -> Exception:
|
||||
if status == HTTPStatus.UNPROCESSABLE_ENTITY:
|
||||
res_code = body.get("Code")
|
||||
if res_code == PROTON_ERROR_CODE_HV_NEEDED:
|
||||
return ProtonAccountNotVerified()
|
||||
|
||||
return Exception(f"Unexpected status code. Wanted 200 and got {status}: " + text)
|
||||
|
||||
|
||||
class ProtonClient(ABC):
|
||||
@abstractmethod
|
||||
def get_user(self) -> Optional[UserInformation]:
|
||||
@ -124,11 +134,11 @@ class HttpProtonClient(ProtonClient):
|
||||
@staticmethod
|
||||
def __validate_response(res: Response) -> dict:
|
||||
status = res.status_code
|
||||
if status != HTTPStatus.OK:
|
||||
raise Exception(
|
||||
f"Unexpected status code. Wanted 200 and got {status}: " + res.text
|
||||
)
|
||||
as_json = res.json()
|
||||
if status != HTTPStatus.OK:
|
||||
raise HttpProtonClient.__handle_response_not_ok(
|
||||
status=status, body=as_json, text=res.text
|
||||
)
|
||||
res_code = as_json.get("Code")
|
||||
if not res_code or res_code != 1000:
|
||||
raise Exception(
|
||||
|
40
app/app/rate_limiter.py
Normal file
@ -0,0 +1,40 @@
from datetime import datetime
from typing import Optional

import newrelic.agent
import redis.exceptions
import werkzeug.exceptions
from limits.storage import RedisStorage

from app.log import LOG

lock_redis: Optional[RedisStorage] = None


def set_redis_concurrent_lock(redis: RedisStorage):
    global lock_redis
    lock_redis = redis


def check_bucket_limit(
    lock_name: Optional[str] = None,
    max_hits: int = 5,
    bucket_seconds: int = 3600,
):
    # Calculate current bucket time
    int_time = int(datetime.utcnow().timestamp())
    bucket_id = int_time - (int_time % bucket_seconds)
    bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
    if not lock_redis:
        return
    try:
        value = lock_redis.incr(bucket_lock_name, bucket_seconds)
        if value > max_hits:
            LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
            newrelic.agent.record_custom_event(
                "BucketRateLimit",
                {"lock_name": lock_name, "bucket_seconds": bucket_seconds},
            )
            raise werkzeug.exceptions.TooManyRequests()
    except (redis.exceptions.RedisError, AttributeError):
        LOG.e("Cannot connect to redis")
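A minimal usage sketch for the new module (the Redis URL and bucket key below are assumptions; check_bucket_limit is a no-op until set_redis_concurrent_lock has been called):

    import limits.storage
    import werkzeug.exceptions

    from app import rate_limiter

    storage = limits.storage.RedisStorage("redis://localhost:6379")  # assumed local Redis
    rate_limiter.set_redis_concurrent_lock(storage)

    try:
        # allow at most 5 hits per hour for this illustrative key
        rate_limiter.check_bucket_limit("alias_create_1d:42", max_hits=5, bucket_seconds=3600)
    except werkzeug.exceptions.TooManyRequests:
        print("rate limited")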
@ -2,21 +2,23 @@ import flask
|
||||
import limits.storage
|
||||
|
||||
from app.parallel_limiter import set_redis_concurrent_lock
|
||||
from app.rate_limiter import set_redis_concurrent_lock as rate_limit_set_redis
|
||||
from app.session import RedisSessionStore
|
||||
|
||||
|
||||
def initialize_redis_services(app: flask.Flask, redis_url: str):
|
||||
|
||||
if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
|
||||
storage = limits.storage.RedisStorage(redis_url)
|
||||
app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
|
||||
set_redis_concurrent_lock(storage)
|
||||
rate_limit_set_redis(storage)
|
||||
elif redis_url.startswith("redis+sentinel://"):
|
||||
storage = limits.storage.RedisSentinelStorage(redis_url)
|
||||
app.session_interface = RedisSessionStore(
|
||||
storage.storage, storage.storage_slave, app
|
||||
)
|
||||
set_redis_concurrent_lock(storage)
|
||||
rate_limit_set_redis(storage)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
f"Tried to set_redis_session with an invalid redis url: ${redis_url}"
|
||||
|
@ -13,17 +13,29 @@ from app.config import (
|
||||
LOCAL_FILE_UPLOAD,
|
||||
UPLOAD_DIR,
|
||||
URL,
|
||||
AWS_ENDPOINT_URL,
|
||||
)
|
||||
|
||||
if not LOCAL_FILE_UPLOAD:
|
||||
_session = boto3.Session(
|
||||
aws_access_key_id=AWS_ACCESS_KEY_ID,
|
||||
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
|
||||
region_name=AWS_REGION,
|
||||
)
|
||||
from app.log import LOG
|
||||
|
||||
|
||||
def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
|
||||
_s3_client = None
|
||||
|
||||
|
||||
def _get_s3client():
|
||||
global _s3_client
|
||||
if _s3_client is None:
|
||||
args = {
|
||||
"aws_access_key_id": AWS_ACCESS_KEY_ID,
|
||||
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
|
||||
"region_name": AWS_REGION,
|
||||
}
|
||||
if AWS_ENDPOINT_URL:
|
||||
args["endpoint_url"] = AWS_ENDPOINT_URL
|
||||
_s3_client = boto3.client("s3", **args)
|
||||
return _s3_client
|
||||
|
||||
|
||||
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
|
||||
bs.seek(0)
|
||||
|
||||
if LOCAL_FILE_UPLOAD:
|
||||
@ -34,7 +46,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
|
||||
f.write(bs.read())
|
||||
|
||||
else:
|
||||
_session.resource("s3").Bucket(BUCKET).put_object(
|
||||
_get_s3client().put_object(
|
||||
Bucket=BUCKET,
|
||||
Key=key,
|
||||
Body=bs,
|
||||
ContentType=content_type,
|
||||
@ -52,7 +65,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
|
||||
f.write(bs.read())
|
||||
|
||||
else:
|
||||
_session.resource("s3").Bucket(BUCKET).put_object(
|
||||
_get_s3client().put_object(
|
||||
Bucket=BUCKET,
|
||||
Key=path,
|
||||
Body=bs,
|
||||
# Support saving a remote file using Http header
|
||||
@ -67,12 +81,9 @@ def download_email(path: str) -> Optional[str]:
|
||||
file_path = os.path.join(UPLOAD_DIR, path)
|
||||
with open(file_path, "rb") as f:
|
||||
return f.read()
|
||||
resp = (
|
||||
_session.resource("s3")
|
||||
.Bucket(BUCKET)
|
||||
.get_object(
|
||||
Key=path,
|
||||
)
|
||||
resp = _get_s3client().get_object(
|
||||
Bucket=BUCKET,
|
||||
Key=path,
|
||||
)
|
||||
if not resp or "Body" not in resp:
|
||||
return None
|
||||
@ -88,8 +99,7 @@ def get_url(key: str, expires_in=3600) -> str:
|
||||
if LOCAL_FILE_UPLOAD:
|
||||
return URL + "/static/upload/" + key
|
||||
else:
|
||||
s3_client = _session.client("s3")
|
||||
return s3_client.generate_presigned_url(
|
||||
return _get_s3client().generate_presigned_url(
|
||||
ExpiresIn=expires_in,
|
||||
ClientMethod="get_object",
|
||||
Params={"Bucket": BUCKET, "Key": key},
|
||||
@ -100,5 +110,15 @@ def delete(path: str):
|
||||
if LOCAL_FILE_UPLOAD:
|
||||
os.remove(os.path.join(UPLOAD_DIR, path))
|
||||
else:
|
||||
o = _session.resource("s3").Bucket(BUCKET).Object(path)
|
||||
o.delete()
|
||||
_get_s3client().delete_object(Bucket=BUCKET, Key=path)
|
||||
|
||||
|
||||
def create_bucket_if_not_exists():
|
||||
s3client = _get_s3client()
|
||||
buckets = s3client.list_buckets()
|
||||
for bucket in buckets["Buckets"]:
|
||||
if bucket["Name"] == BUCKET:
|
||||
LOG.i("Bucket already exists")
|
||||
return
|
||||
s3client.create_bucket(Bucket=BUCKET)
|
||||
LOG.i(f"Bucket {BUCKET} created")
|
||||
|
@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
|
||||
try:
|
||||
data = pickle.loads(val)
|
||||
return ServerSession(data, session_id=session_id)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
return ServerSession(session_id=str(uuid.uuid4()))
|
||||
|
||||
|
33
app/app/subscription_webhook.py
Normal file
@ -0,0 +1,33 @@
import requests
from requests import RequestException

from app import config
from app.log import LOG
from app.models import User


def execute_subscription_webhook(user: User):
    webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
    if webhook_url is None:
        return
    subscription_end = user.get_active_subscription_end(
        include_partner_subscription=False
    )
    sl_subscription_end = None
    if subscription_end:
        sl_subscription_end = subscription_end.timestamp
    payload = {
        "user_id": user.id,
        "is_premium": user.is_premium(),
        "active_subscription_end": sl_subscription_end,
    }
    try:
        response = requests.post(webhook_url, json=payload, timeout=2)
        if response.status_code == 200:
            LOG.i("Sent request to subscription update webhook successfully")
        else:
            LOG.i(
                f"Request to webhook failed with status {response.status_code}: {response.text}"
            )
    except RequestException as e:
        LOG.error(f"Subscription request exception: {e}")
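A hypothetical call site (the webhook URL is an assumption; the function is simply invoked whenever a user's subscription state changes):

    from app import config
    from app.models import User
    from app.subscription_webhook import execute_subscription_webhook

    config.SUBSCRIPTION_CHANGE_WEBHOOK = "https://example.com/subscription-hook"  # assumed

    user = User.get(42)  # illustrative id
    if user:
        execute_subscription_webhook(user)  # POSTs user_id, is_premium, active_subscription_end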
@ -1,3 +1,4 @@
|
||||
import random
|
||||
import re
|
||||
import secrets
|
||||
import string
|
||||
@ -25,11 +26,16 @@ def word_exist(word):
|
||||
return word in _words
|
||||
|
||||
|
||||
def random_words():
|
||||
def random_words(words: int = 2, numbers: int = 0):
|
||||
"""Generate a random words. Used to generate user-facing string, for ex email addresses"""
|
||||
# nb_words = random.randint(2, 3)
|
||||
nb_words = 2
|
||||
return "_".join([secrets.choice(_words) for i in range(nb_words)])
|
||||
fields = [secrets.choice(_words) for i in range(words)]
|
||||
|
||||
if numbers > 0:
|
||||
digits = "".join([str(random.randint(0, 9)) for i in range(numbers)])
|
||||
return "_".join(fields) + digits
|
||||
else:
|
||||
return "_".join(fields)
|
||||
|
||||
|
||||
def random_string(length=10, include_digits=False):
|
||||
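A quick sketch of the new random_words signature (outputs are drawn from the bundled word list, so the exact words vary):

    from app.utils import random_words

    random_words()      # e.g. "orange_cabbage"     - 2 words, no digits
    random_words(2, 3)  # e.g. "orange_cabbage123"  - 2 words plus 3 digits
    random_words(1)     # e.g. "orange"             - single word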
@ -43,11 +49,11 @@ def random_string(length=10, include_digits=False):
|
||||
|
||||
def convert_to_id(s: str):
|
||||
"""convert a string to id-like: remove space, remove special accent"""
|
||||
s = s.replace(" ", "")
|
||||
s = s.lower()
|
||||
s = unidecode(s)
|
||||
s = s.replace(" ", "")
|
||||
|
||||
return s
|
||||
return s[:256]
|
||||
|
||||
|
||||
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-."
|
||||
@ -93,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
|
||||
email_address = email_address.strip().replace(" ", "").replace("\n", " ")
|
||||
if not not_lower:
|
||||
email_address = email_address.lower()
|
||||
return email_address
|
||||
return email_address.replace("\u200f", "")
|
||||
|
||||
|
||||
class NextUrlSanitizer:
|
||||
|
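Two small examples of the utils changes above (inputs are made up):

    from app.utils import convert_to_id, sanitize_email

    convert_to_id("Crème Brûlée")                # -> "cremebrulee", now capped at 256 chars
    sanitize_email(" User@Example.COM\u200f ")   # -> "user@example.com" (RTL mark stripped)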
139
app/cron.py
@ -5,11 +5,11 @@ from typing import List, Tuple
|
||||
|
||||
import arrow
|
||||
import requests
|
||||
from sqlalchemy import func, desc, or_
|
||||
from sqlalchemy import func, desc, or_, and_, nullsfirst
|
||||
from sqlalchemy.ext.compiler import compiles
|
||||
from sqlalchemy.orm import joinedload
|
||||
from sqlalchemy.orm.exc import ObjectDeletedError
|
||||
from sqlalchemy.sql import Insert
|
||||
from sqlalchemy.sql import Insert, text
|
||||
|
||||
from app import s3, config
|
||||
from app.alias_utils import nb_email_log_for_mailbox
|
||||
@ -22,10 +22,9 @@ from app.email_utils import (
|
||||
render,
|
||||
email_can_be_used_as_mailbox,
|
||||
send_email_with_rate_control,
|
||||
normalize_reply_email,
|
||||
is_valid_email,
|
||||
get_email_domain_part,
|
||||
)
|
||||
from app.email_validation import is_valid_email, normalize_reply_email
|
||||
from app.errors import ProtonPartnerNotSetUp
|
||||
from app.log import LOG
|
||||
from app.mail_sender import load_unsent_mails_from_fs_and_resend
|
||||
@ -63,15 +62,19 @@ from app.proton.utils import get_proton_partner
|
||||
from app.utils import sanitize_email
|
||||
from server import create_light_app
|
||||
|
||||
DELETE_GRACE_DAYS = 30
|
||||
|
||||
|
||||
def notify_trial_end():
|
||||
for user in User.filter(
|
||||
User.activated.is_(True), User.trial_end.isnot(None), User.lifetime.is_(False)
|
||||
User.activated.is_(True),
|
||||
User.trial_end.isnot(None),
|
||||
User.trial_end >= arrow.now().shift(days=2),
|
||||
User.trial_end < arrow.now().shift(days=3),
|
||||
User.lifetime.is_(False),
|
||||
).all():
|
||||
try:
|
||||
if user.in_trial() and arrow.now().shift(
|
||||
days=3
|
||||
) > user.trial_end >= arrow.now().shift(days=2):
|
||||
if user.in_trial():
|
||||
LOG.d("Send trial end email to user %s", user)
|
||||
send_trial_end_soon_email(user)
|
||||
# happens if user has been deleted in the meantime
|
||||
@ -84,27 +87,49 @@ def delete_logs():
|
||||
delete_refused_emails()
|
||||
delete_old_monitoring()
|
||||
|
||||
for t in TransactionalEmail.filter(
|
||||
for t_email in TransactionalEmail.filter(
|
||||
TransactionalEmail.created_at < arrow.now().shift(days=-7)
|
||||
):
|
||||
TransactionalEmail.delete(t.id)
|
||||
TransactionalEmail.delete(t_email.id)
|
||||
|
||||
for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
|
||||
Bounce.delete(b.id)
|
||||
|
||||
Session.commit()
|
||||
|
||||
LOG.d("Delete EmailLog older than 2 weeks")
|
||||
LOG.d("Deleting EmailLog older than 2 weeks")
|
||||
|
||||
max_dt = arrow.now().shift(weeks=-2)
|
||||
nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete()
|
||||
Session.commit()
|
||||
total_deleted = 0
|
||||
batch_size = 500
|
||||
Session.execute("set session statement_timeout=30000").rowcount
|
||||
queries_done = 0
|
||||
cutoff_time = arrow.now().shift(days=-14)
|
||||
rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
|
||||
expected_queries = int(rows_to_delete / batch_size)
|
||||
sql = text(
|
||||
"DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
|
||||
)
|
||||
str_cutoff_time = cutoff_time.isoformat()
|
||||
while total_deleted < rows_to_delete:
|
||||
deleted_count = Session.execute(
|
||||
sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
|
||||
).rowcount
|
||||
Session.commit()
|
||||
total_deleted += deleted_count
|
||||
queries_done += 1
|
||||
LOG.i(
|
||||
f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
|
||||
)
|
||||
if deleted_count < batch_size:
|
||||
break
|
||||
|
||||
LOG.i("Delete %s email logs", nb_deleted)
|
||||
LOG.i("Deleted %s email logs", total_deleted)
|
||||
|
||||
|
||||
def delete_refused_emails():
|
||||
for refused_email in RefusedEmail.filter_by(deleted=False).all():
|
||||
for refused_email in (
|
||||
RefusedEmail.filter_by(deleted=False).order_by(RefusedEmail.id).all()
|
||||
):
|
||||
if arrow.now().shift(days=1) > refused_email.delete_at >= arrow.now():
|
||||
LOG.d("Delete refused email %s", refused_email)
|
||||
if refused_email.path:
|
||||
@ -138,7 +163,7 @@ def notify_premium_end():
|
||||
|
||||
send_email(
|
||||
user.email,
|
||||
f"Your subscription will end soon",
|
||||
"Your subscription will end soon",
|
||||
render(
|
||||
"transactional/subscription-end.txt",
|
||||
user=user,
|
||||
@ -195,7 +220,7 @@ def notify_manual_sub_end():
|
||||
LOG.d("Remind user %s that their manual sub is ending soon", user)
|
||||
send_email(
|
||||
user.email,
|
||||
f"Your subscription will end soon",
|
||||
"Your subscription will end soon",
|
||||
render(
|
||||
"transactional/manual-subscription-end.txt",
|
||||
user=user,
|
||||
@ -272,7 +297,11 @@ def compute_metric2() -> Metric2:
|
||||
_24h_ago = now.shift(days=-1)
|
||||
|
||||
nb_referred_user_paid = 0
|
||||
for user in User.filter(User.referral_id.isnot(None)):
|
||||
for user in (
|
||||
User.filter(User.referral_id.isnot(None))
|
||||
.yield_per(500)
|
||||
.enable_eagerloads(False)
|
||||
):
|
||||
if user.is_paid():
|
||||
nb_referred_user_paid += 1
|
||||
|
||||
@ -563,21 +592,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
|
||||
"""
|
||||
|
||||
monitoring_report += "\n====================================\n"
|
||||
monitoring_report += f"""
|
||||
monitoring_report += """
|
||||
# Account bounce report:
|
||||
"""
|
||||
|
||||
for email, bounces in bounce_report():
|
||||
monitoring_report += f"{email}: {bounces}\n"
|
||||
|
||||
monitoring_report += f"""\n
|
||||
monitoring_report += """\n
|
||||
# Alias creation report:
|
||||
"""
|
||||
|
||||
for email, nb_alias, date in alias_creation_report():
|
||||
monitoring_report += f"{email}, {date}: {nb_alias}\n"
|
||||
|
||||
monitoring_report += f"""\n
|
||||
monitoring_report += """\n
|
||||
# Full bounce detail report:
|
||||
"""
|
||||
monitoring_report += all_bounce_report()
|
||||
@ -933,6 +962,9 @@ async def _hibp_check(api_key, queue):
|
||||
|
||||
This function is meant to be run simultaneously (multiple _hibp_check functions with different keys on the same queue) to make maximum use of multiple API keys.
|
||||
"""
|
||||
default_rate_sleep = (60.0 / config.HIBP_RPM) + 0.1
|
||||
rate_sleep = default_rate_sleep
|
||||
rate_hit_counter = 0
|
||||
while True:
|
||||
try:
|
||||
alias_id = queue.get_nowait()
|
||||
@ -940,9 +972,11 @@ async def _hibp_check(api_key, queue):
|
||||
return
|
||||
|
||||
alias = Alias.get(alias_id)
|
||||
# an alias can be deleted in the meantime
|
||||
if not alias:
|
||||
return
|
||||
continue
|
||||
user = alias.user
|
||||
if user.disabled or not user.is_paid():
|
||||
continue
|
||||
|
||||
LOG.d("Checking HIBP for %s", alias)
|
||||
|
||||
@ -954,7 +988,6 @@ async def _hibp_check(api_key, queue):
|
||||
f"https://haveibeenpwned.com/api/v3/breachedaccount/{urllib.parse.quote(alias.email)}",
|
||||
headers=request_headers,
|
||||
)
|
||||
|
||||
if r.status_code == 200:
|
||||
# Breaches found
|
||||
alias.hibp_breaches = [
|
||||
@ -962,20 +995,27 @@ async def _hibp_check(api_key, queue):
|
||||
]
|
||||
if len(alias.hibp_breaches) > 0:
|
||||
LOG.w("%s appears in HIBP breaches %s", alias, alias.hibp_breaches)
|
||||
if rate_hit_counter > 0:
|
||||
rate_hit_counter -= 1
|
||||
elif r.status_code == 404:
|
||||
# No breaches found
|
||||
alias.hibp_breaches = []
|
||||
elif r.status_code == 429:
|
||||
# rate limited
|
||||
LOG.w("HIBP rate limited, check alias %s in the next run", alias)
|
||||
await asyncio.sleep(1.6)
|
||||
return
|
||||
rate_hit_counter += 1
|
||||
rate_sleep = default_rate_sleep + (0.2 * rate_hit_counter)
|
||||
if rate_hit_counter > 10:
|
||||
LOG.w(f"HIBP rate limited too many times stopping with alias {alias}")
|
||||
return
|
||||
# Just sleep for a while
|
||||
await asyncio.sleep(5)
|
||||
elif r.status_code > 500:
|
||||
LOG.w("HIBP server 5** error %s", r.status_code)
|
||||
return
|
||||
else:
|
||||
LOG.error(
|
||||
"An error occured while checking alias %s: %s - %s",
|
||||
"An error occurred while checking alias %s: %s - %s",
|
||||
alias,
|
||||
r.status_code,
|
||||
r.text,
|
||||
@ -986,9 +1026,8 @@ async def _hibp_check(api_key, queue):
|
||||
Session.add(alias)
|
||||
Session.commit()
|
||||
|
||||
LOG.d("Updated breaches info for %s", alias)
|
||||
|
||||
await asyncio.sleep(1.6)
|
||||
LOG.d("Updated breach info for %s", alias)
|
||||
await asyncio.sleep(rate_sleep)
|
||||
|
||||
|
||||
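The docstring above describes running several `_hibp_check` workers, one per HIBP API key, against a single queue of alias ids. As an illustrative sketch of that fan-out only (names and the fixed pacing below are assumptions, not the project's code):

```python
import asyncio


async def worker(api_key: str, queue: asyncio.Queue) -> None:
    """Drain alias ids from a shared queue; one worker runs per API key."""
    while True:
        try:
            alias_id = queue.get_nowait()
        except asyncio.QueueEmpty:
            return  # nothing left to check
        # ... call the HIBP API for alias_id with this worker's api_key,
        # backing off or giving up when the API answers 429 ...
        await asyncio.sleep(1.6)  # simple fixed pacing for the sketch


async def check_all(api_keys: list, alias_ids: list) -> None:
    queue: asyncio.Queue = asyncio.Queue()
    for alias_id in alias_ids:
        await queue.put(alias_id)
    # one concurrent worker per available API key, all sharing the queue
    await asyncio.gather(*(worker(key, queue) for key in api_keys))


if __name__ == "__main__":
    asyncio.run(check_all(["key-1", "key-2"], [1, 2, 3]))
```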
async def check_hibp():
|
||||
@ -1011,16 +1050,24 @@ async def check_hibp():
|
||||
Session.commit()
|
||||
LOG.d("Updated list of known breaches")
|
||||
|
||||
LOG.d("Getting the list of users to skip")
|
||||
query = "select u.id, count(a.id) from users u, alias a where a.user_id=u.id group by u.id having count(a.id) > :max_alias"
|
||||
rows = Session.execute(query, {"max_alias": config.HIBP_MAX_ALIAS_CHECK})
|
||||
user_ids = [row[0] for row in rows]
|
||||
LOG.d("Got %d users to skip" % len(user_ids))
|
||||
|
||||
LOG.d("Preparing list of aliases to check")
|
||||
queue = asyncio.Queue()
|
||||
max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
|
||||
for alias in (
|
||||
Alias.filter(
|
||||
or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date)
|
||||
or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date),
|
||||
Alias.user_id.notin_(user_ids),
|
||||
)
|
||||
.filter(Alias.enabled)
|
||||
.order_by(Alias.hibp_last_check.asc())
|
||||
.all()
|
||||
.order_by(nullsfirst(Alias.hibp_last_check.asc()), Alias.id.asc())
|
||||
.yield_per(500)
|
||||
.enable_eagerloads(False)
|
||||
):
|
||||
await queue.put(alias.id)
|
||||
|
||||
@ -1071,14 +1118,14 @@ def notify_hibp():
|
||||
)
|
||||
|
||||
LOG.d(
|
||||
f"Send new breaches found email to %s for %s breaches aliases",
|
||||
"Send new breaches found email to %s for %s breaches aliases",
|
||||
user,
|
||||
len(breached_aliases),
|
||||
)
|
||||
|
||||
send_email(
|
||||
user.email,
|
||||
f"You were in a data breach",
|
||||
"You were in a data breach",
|
||||
render(
|
||||
"transactional/hibp-new-breaches.txt.jinja2",
|
||||
user=user,
|
||||
@ -1098,6 +1145,23 @@ def notify_hibp():
|
||||
Session.commit()
|
||||
|
||||
|
||||
def clear_users_scheduled_to_be_deleted(dry_run=False):
|
||||
users = User.filter(
|
||||
and_(
|
||||
User.delete_on.isnot(None),
|
||||
User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
|
||||
)
|
||||
).all()
|
||||
for user in users:
|
||||
LOG.i(
|
||||
f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
|
||||
)
|
||||
if dry_run:
|
||||
continue
|
||||
User.delete(user.id)
|
||||
Session.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
LOG.d("Start running cronjob")
|
||||
parser = argparse.ArgumentParser()
|
||||
@ -1164,3 +1228,6 @@ if __name__ == "__main__":
|
||||
elif args.job == "send_undelivered_mails":
|
||||
LOG.d("Sending undelivered emails")
|
||||
load_unsent_mails_from_fs_and_resend()
|
||||
elif args.job == "delete_scheduled_users":
|
||||
LOG.d("Deleting users scheduled to be deleted")
|
||||
clear_users_scheduled_to_be_deleted(dry_run=True)
|
||||
|
@ -5,65 +5,66 @@ jobs:
|
||||
schedule: "0 0 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Trial Ends
|
||||
command: python /code/cron.py -j notify_trial_end
|
||||
shell: /bin/bash
|
||||
schedule: "0 8 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Manual Subscription Ends
|
||||
command: python /code/cron.py -j notify_manual_subscription_end
|
||||
shell: /bin/bash
|
||||
schedule: "0 9 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Premium Ends
|
||||
command: python /code/cron.py -j notify_premium_end
|
||||
shell: /bin/bash
|
||||
schedule: "0 10 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Delete Logs
|
||||
command: python /code/cron.py -j delete_logs
|
||||
shell: /bin/bash
|
||||
schedule: "0 11 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Poll Apple Subscriptions
|
||||
command: python /code/cron.py -j poll_apple_subscription
|
||||
shell: /bin/bash
|
||||
schedule: "0 12 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Sanity Check
|
||||
command: python /code/cron.py -j sanity_check
|
||||
shell: /bin/bash
|
||||
schedule: "0 2 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Delete Old Monitoring records
|
||||
command: python /code/cron.py -j delete_old_monitoring
|
||||
shell: /bin/bash
|
||||
schedule: "0 14 * * *"
|
||||
schedule: "15 1 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Custom Domain check
|
||||
command: python /code/cron.py -j check_custom_domain
|
||||
shell: /bin/bash
|
||||
schedule: "0 15 * * *"
|
||||
schedule: "15 2 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin HIBP check
|
||||
command: python /code/cron.py -j check_hibp
|
||||
shell: /bin/bash
|
||||
schedule: "0 18 * * *"
|
||||
schedule: "15 3 * * *"
|
||||
captureStderr: true
|
||||
concurrencyPolicy: Forbid
|
||||
|
||||
- name: SimpleLogin Notify HIBP breaches
|
||||
command: python /code/cron.py -j notify_hibp
|
||||
shell: /bin/bash
|
||||
schedule: "0 19 * * *"
|
||||
schedule: "15 4 * * *"
|
||||
captureStderr: true
|
||||
concurrencyPolicy: Forbid
|
||||
|
||||
- name: SimpleLogin Delete Logs
|
||||
command: python /code/cron.py -j delete_logs
|
||||
shell: /bin/bash
|
||||
schedule: "15 5 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Poll Apple Subscriptions
|
||||
command: python /code/cron.py -j poll_apple_subscription
|
||||
shell: /bin/bash
|
||||
schedule: "15 6 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Trial Ends
|
||||
command: python /code/cron.py -j notify_trial_end
|
||||
shell: /bin/bash
|
||||
schedule: "15 8 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Manual Subscription Ends
|
||||
command: python /code/cron.py -j notify_manual_subscription_end
|
||||
shell: /bin/bash
|
||||
schedule: "15 9 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Premium Ends
|
||||
command: python /code/cron.py -j notify_premium_end
|
||||
shell: /bin/bash
|
||||
schedule: "15 10 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin delete users scheduled to be deleted
|
||||
command: python /code/cron.py -j delete_scheduled_users
|
||||
shell: /bin/bash
|
||||
schedule: "15 11 * * *"
|
||||
captureStderr: true
|
||||
concurrencyPolicy: Forbid
|
||||
|
||||
|
@ -15,6 +15,7 @@
|
||||
- [GET /api/user/cookie_token](#get-apiusercookie_token): Get a one time use token to exchange it for a valid cookie
|
||||
- [PATCH /api/user_info](#patch-apiuser_info): Update user's information.
|
||||
- [POST /api/api_key](#post-apiapi_key): Create a new API key.
|
||||
- [GET /api/stats](#get-apistats): Get user's stats.
|
||||
- [GET /api/logout](#get-apilogout): Log out.
|
||||
|
||||
[Alias endpoints](#alias-endpoints)
|
||||
@ -226,6 +227,22 @@ Input:
|
||||
|
||||
Output: same as GET /api/user_info
|
||||
|
||||
#### GET /api/stats
|
||||
|
||||
Given the API key, return stats about the number of aliases and the number of emails forwarded/replied/blocked.
|
||||
|
||||
Input:
|
||||
|
||||
- `Authentication` header that contains the api key
|
||||
|
||||
Output: if the API key is correct, return a JSON object with the following fields:
|
||||
|
||||
```json
|
||||
{"nb_alias": 1, "nb_block": 0, "nb_forward": 0, "nb_reply": 0}
|
||||
```
|
||||
|
||||
If the API key is incorrect, return 401.
|
||||
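For reference, a minimal client call could look like the sketch below (the base URL and the `requests` dependency are assumptions; only the `/api/stats` path and the `Authentication` header come from this doc):

```python
import requests

API_KEY = "your-api-key"                # placeholder
BASE_URL = "https://app.mydomain.com"   # placeholder for your instance

resp = requests.get(
    f"{BASE_URL}/api/stats",
    headers={"Authentication": API_KEY},
    timeout=10,
)
if resp.status_code == 401:
    raise SystemExit("Invalid API key")
resp.raise_for_status()
print(resp.json())  # e.g. {"nb_alias": 1, "nb_block": 0, "nb_forward": 0, "nb_reply": 0}
```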
|
||||
#### PATCH /api/sudo
|
||||
|
||||
Enable sudo mode
|
||||
@ -371,7 +388,7 @@ Input:
|
||||
- (Optional but recommended) `hostname` passed in query string
|
||||
- Request Message Body in json (`Content-Type` is `application/json`)
|
||||
- alias_prefix: string. The first part of the alias that user can choose.
|
||||
- signed_suffix: should be one of the suffixes returned in the `GET /api/v4/alias/options` endpoint.
|
||||
- signed_suffix: should be one of the suffixes returned in the `GET /api/v5/alias/options` endpoint.
|
||||
- mailbox_ids: list of mailbox_id that "owns" this alias
|
||||
- (Optional) note: alias note
|
||||
- (Optional) name: alias name
|
||||
@ -694,7 +711,7 @@ Return 200 and `existed=true` if contact is already added.
|
||||
|
||||
It can return 403 with an error if the user cannot create reverse alias.
|
||||
|
||||
``json
|
||||
```json
|
||||
{
|
||||
"error": "Please upgrade to create a reverse-alias"
|
||||
}
|
||||
|
123 app/docs/ssl.md
@ -1,4 +1,4 @@
|
||||
# SSL, HTTPS, and HSTS
|
||||
# SSL, HTTPS, HSTS and additional security measures
|
||||
|
||||
It's highly recommended to enable SSL/TLS on your server, both for the web app and email server.
|
||||
|
||||
@ -58,3 +58,124 @@ Now, reload Nginx:
|
||||
```bash
|
||||
sudo systemctl reload nginx
|
||||
```
|
||||
|
||||
## Additional security measures
|
||||
|
||||
For additional security, we recommend you take some extra steps.
|
||||
|
||||
### Enable Certificate Authority Authorization (CAA)
|
||||
|
||||
[Certificate Authority Authorization](https://letsencrypt.org/docs/caa/) is a step you can take to restrict the list of certificate authorities that are allowed to issue certificates for your domains.
|
||||
|
||||
Use [SSLMate’s CAA Record Generator](https://sslmate.com/caa/) to create a **CAA record** with the following configuration:
|
||||
|
||||
- `flags`: `0`
|
||||
- `tag`: `issue`
|
||||
- `value`: `"letsencrypt.org"`
|
||||
|
||||
To verify that the DNS record works, the following command
|
||||
|
||||
```bash
|
||||
dig @1.1.1.1 mydomain.com caa
|
||||
```
|
||||
|
||||
should return:
|
||||
|
||||
```
|
||||
mydomain.com. 3600 IN CAA 0 issue "letsencrypt.org"
|
||||
```
|
||||
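If you prefer a programmatic check over `dig`, a few lines of Python with the third-party `dnspython` package (version 2 or newer; not required by SimpleLogin, shown only as an alternative) do the same:

```python
import dns.resolver  # third-party package: dnspython >= 2.0

# Replace with your own domain.
for rdata in dns.resolver.resolve("mydomain.com", "CAA"):
    print(rdata.to_text())  # expect: 0 issue "letsencrypt.org"
```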
|
||||
### SMTP MTA Strict Transport Security (MTA-STS)
|
||||
|
||||
[MTA-STS](https://datatracker.ietf.org/doc/html/rfc8461) is an extra step you can take to broadcast the ability of your instance to receive and, optionally, enforce TLS-secure SMTP connections to protect email traffic.
|
||||
|
||||
Enabling MTA-STS requires you to serve a specific file from the `mta-sts.mydomain.com` subdomain on a well-known route.
|
||||
|
||||
Create a text file `/var/www/.well-known/mta-sts.txt` with the content:
|
||||
|
||||
```txt
|
||||
version: STSv1
|
||||
mode: testing
|
||||
mx: app.mydomain.com
|
||||
max_age: 86400
|
||||
```
|
||||
|
||||
It is recommended to start with `mode: testing` to give yourself time to review failure reports. Add as many `mx:` domain entries as you have matching **MX records** in your DNS configuration.
|
||||
|
||||
Create a **TXT record** for `_mta-sts.mydomain.com.` with the following value:
|
||||
|
||||
```txt
|
||||
v=STSv1; id=UNIX_TIMESTAMP
|
||||
```
|
||||
|
||||
Here `UNIX_TIMESTAMP` is the current date/time as a Unix timestamp.
|
||||
|
||||
Use the following command to generate the record:
|
||||
|
||||
```bash
|
||||
echo "v=STSv1; id=$(date +%s)"
|
||||
```
|
||||
|
||||
To verify that the DNS record works, the following command
|
||||
|
||||
```bash
|
||||
dig @1.1.1.1 _mta-sts.mydomain.com txt
|
||||
```
|
||||
|
||||
should return a result similar to this one:
|
||||
|
||||
```
|
||||
_mta-sts.mydomain.com. 3600 IN TXT "v=STSv1; id=1689416399"
|
||||
```
|
||||
|
||||
Create an additional Nginx configuration in `/etc/nginx/sites-enabled/mta-sts` with the following content:
|
||||
|
||||
```
|
||||
server {
|
||||
server_name mta-sts.mydomain.com;
|
||||
root /var/www;
|
||||
listen 80;
|
||||
|
||||
location ^~ /.well-known {}
|
||||
}
|
||||
```
|
||||
|
||||
Restart Nginx with the following command:
|
||||
|
||||
```sh
|
||||
sudo service nginx restart
|
||||
```
|
||||
|
||||
A correct configuration of MTA-STS, however, requires that the certificate used to host the `mta-sts` subdomain matches that of the subdomain referred to by the **MX record** from the DNS. In other words, both `mta-sts.mydomain.com` and `app.mydomain.com` must share the same certificate.
|
||||
|
||||
The easiest way to do this is to _expand_ the certificate associated with `app.mydomain.com` to also support the `mta-sts` subdomain using the following command:
|
||||
|
||||
```sh
|
||||
certbot --expand --nginx -d app.mydomain.com,mta-sts.mydomain.com
|
||||
```
|
||||
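As an optional sanity check (not part of the official steps), you can confirm that the policy file is reachable over HTTPS at the well-known path with a short Python snippet:

```python
import urllib.request

# Replace with your own mta-sts subdomain.
url = "https://mta-sts.mydomain.com/.well-known/mta-sts.txt"
with urllib.request.urlopen(url, timeout=10) as resp:
    print(resp.read().decode())
# Expect the file created earlier: version, mode, mx entries and max_age.
```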
|
||||
### SMTP TLS Reporting
|
||||
|
||||
[TLSRPT](https://datatracker.ietf.org/doc/html/rfc8460) is used by SMTP systems to report failures in establishing TLS-secure sessions as broadcast by the MTA-STS configuration.
|
||||
|
||||
Configuring MTA-STS in `mode: testing` as shown in the previous section gives you time to review failures from some SMTP senders.
|
||||
|
||||
Create a **TXT record** for `_smtp._tls.mydomain.com.` with the following value:
|
||||
|
||||
```txt
|
||||
v=TLSRPTv1; rua=mailto:YOUR_EMAIL
|
||||
```
|
||||
|
||||
The TLSRPT configuration at the DNS level allows SMTP senders that fail to initiate TLS-secure sessions to send reports to a particular email address. We suggest creating a `tls-reports` alias in SimpleLogin for this purpose.
|
||||
|
||||
To verify that the DNS record works, the following command
|
||||
|
||||
```bash
|
||||
dig @1.1.1.1 _smtp._tls.mydomain.com txt
|
||||
```
|
||||
|
||||
should return a result similar to this one:
|
||||
|
||||
```
|
||||
_smtp._tls.mydomain.com. 3600 IN TXT "v=TLSRPTv1; rua=mailto:tls-reports@mydomain.com"
|
||||
```
|
||||
|
@ -106,8 +106,6 @@ from app.email_utils import (
|
||||
get_header_unicode,
|
||||
generate_reply_email,
|
||||
is_reverse_alias,
|
||||
normalize_reply_email,
|
||||
is_valid_email,
|
||||
replace,
|
||||
should_disable,
|
||||
parse_id_from_bounce,
|
||||
@ -123,6 +121,7 @@ from app.email_utils import (
|
||||
generate_verp_email,
|
||||
sl_formataddr,
|
||||
)
|
||||
from app.email_validation import is_valid_email, normalize_reply_email
|
||||
from app.errors import (
|
||||
NonReverseAliasInReplyPhase,
|
||||
VERPTransactional,
|
||||
@ -161,6 +160,7 @@ from app.models import (
|
||||
MessageIDMatching,
|
||||
Notification,
|
||||
VerpType,
|
||||
SLDomain,
|
||||
)
|
||||
from app.pgp_utils import (
|
||||
PGPException,
|
||||
@ -235,17 +235,17 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
||||
contact.mail_from = mail_from
|
||||
Session.commit()
|
||||
else:
|
||||
|
||||
try:
|
||||
contact_email_for_reply = (
|
||||
contact_email if is_valid_email(contact_email) else ""
|
||||
)
|
||||
contact = Contact.create(
|
||||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=contact_email,
|
||||
name=contact_name,
|
||||
mail_from=mail_from,
|
||||
reply_email=generate_reply_email(contact_email, alias.user)
|
||||
if is_valid_email(contact_email)
|
||||
else NOREPLY,
|
||||
reply_email=generate_reply_email(contact_email_for_reply, alias),
|
||||
automatic_created=True,
|
||||
)
|
||||
if not contact_email:
|
||||
@ -261,7 +261,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
||||
|
||||
Session.commit()
|
||||
except IntegrityError:
|
||||
LOG.w("Contact %s %s already exist", alias, contact_email)
|
||||
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
|
||||
Session.rollback()
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
|
||||
@ -279,6 +279,9 @@ def get_or_create_reply_to_contact(
|
||||
except ValueError:
|
||||
return
|
||||
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if not is_valid_email(contact_address):
|
||||
LOG.w(
|
||||
"invalid reply-to address %s. Parse from %s",
|
||||
@ -304,7 +307,7 @@ def get_or_create_reply_to_contact(
|
||||
alias_id=alias.id,
|
||||
website_email=contact_address,
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_address, alias.user),
|
||||
reply_email=generate_reply_email(contact_address, alias),
|
||||
automatic_created=True,
|
||||
)
|
||||
Session.commit()
|
||||
@ -347,6 +350,10 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
||||
continue
|
||||
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
contact_name = full_address.display_name
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if contact:
|
||||
# update the contact name if needed
|
||||
if contact.name != full_address.display_name:
|
||||
@ -354,9 +361,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
||||
"Update contact %s name %s to %s",
|
||||
contact,
|
||||
contact.name,
|
||||
full_address.display_name,
|
||||
contact_name,
|
||||
)
|
||||
contact.name = full_address.display_name
|
||||
contact.name = contact_name
|
||||
Session.commit()
|
||||
else:
|
||||
LOG.d(
|
||||
@ -371,8 +378,8 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
||||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=contact_email,
|
||||
name=full_address.display_name,
|
||||
reply_email=generate_reply_email(contact_email, alias.user),
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_email, alias),
|
||||
is_cc=header.lower() == "cc",
|
||||
automatic_created=True,
|
||||
)
|
||||
@ -540,12 +547,20 @@ def sign_msg(msg: Message) -> Message:
|
||||
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
|
||||
|
||||
try:
|
||||
signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
|
||||
payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
|
||||
if not payload:
|
||||
raise PGPException("Empty signature by gnupg")
|
||||
|
||||
signature.set_payload(payload)
|
||||
except Exception:
|
||||
LOG.e("Cannot sign, try using pgpy")
|
||||
signature.set_payload(
|
||||
sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
)
|
||||
payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
|
||||
if not payload:
|
||||
raise PGPException("Empty signature by pgpy")
|
||||
|
||||
signature.set_payload(payload)
|
||||
|
||||
container.attach(signature)
|
||||
|
||||
@ -622,8 +637,12 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
|
||||
|
||||
user = alias.user
|
||||
|
||||
if user.disabled:
|
||||
LOG.w("User %s disabled, disable forwarding emails for %s", user, alias)
|
||||
if not user.is_active():
|
||||
LOG.w(f"User {user} has been soft deleted")
|
||||
return False, status.E502
|
||||
|
||||
if not user.can_send_or_receive():
|
||||
LOG.i(f"User {user} cannot receive emails")
|
||||
if should_ignore_bounce(envelope.mail_from):
|
||||
return [(True, status.E207)]
|
||||
else:
|
||||
@ -693,6 +712,36 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
|
||||
LOG.d("%s unverified, do not forward", mailbox)
|
||||
ret.append((False, status.E517))
|
||||
else:
|
||||
# Check if the mailbox is also an alias and stop the loop
|
||||
mailbox_as_alias = Alias.get_by(email=mailbox.email)
|
||||
if mailbox_as_alias is not None:
|
||||
LOG.info(
|
||||
f"Mailbox {mailbox.id} has email {mailbox.email} that is also alias {alias.id}. Stopping loop"
|
||||
)
|
||||
mailbox.verified = False
|
||||
Session.commit()
|
||||
mailbox_url = f"{URL}/dashboard/mailbox/{mailbox.id}/"
|
||||
send_email_with_rate_control(
|
||||
user,
|
||||
ALERT_MAILBOX_IS_ALIAS,
|
||||
user.email,
|
||||
f"Your mailbox {mailbox.email} is an alias",
|
||||
render(
|
||||
"transactional/mailbox-invalid.txt.jinja2",
|
||||
mailbox=mailbox,
|
||||
mailbox_url=mailbox_url,
|
||||
alias=alias,
|
||||
),
|
||||
render(
|
||||
"transactional/mailbox-invalid.html",
|
||||
mailbox=mailbox,
|
||||
mailbox_url=mailbox_url,
|
||||
alias=alias,
|
||||
),
|
||||
max_nb_alert=1,
|
||||
)
|
||||
ret.append((False, status.E525))
|
||||
continue
|
||||
# create a copy of message for each forward
|
||||
ret.append(
|
||||
forward_email_to_mailbox(
|
||||
@ -815,36 +864,40 @@ def forward_email_to_mailbox(
|
||||
f"""Email sent to {alias.email} from an invalid address and cannot be replied""",
|
||||
)
|
||||
|
||||
delete_all_headers_except(
|
||||
msg,
|
||||
[
|
||||
headers.FROM,
|
||||
headers.TO,
|
||||
headers.CC,
|
||||
headers.SUBJECT,
|
||||
headers.DATE,
|
||||
# do not delete original message id
|
||||
headers.MESSAGE_ID,
|
||||
# References and In-Reply-To are used for keeping the email thread
|
||||
headers.REFERENCES,
|
||||
headers.IN_REPLY_TO,
|
||||
]
|
||||
+ headers.MIME_HEADERS,
|
||||
)
|
||||
headers_to_keep = [
|
||||
headers.FROM,
|
||||
headers.TO,
|
||||
headers.CC,
|
||||
headers.SUBJECT,
|
||||
headers.DATE,
|
||||
# do not delete original message id
|
||||
headers.MESSAGE_ID,
|
||||
# References and In-Reply-To are used for keeping the email thread
|
||||
headers.REFERENCES,
|
||||
headers.IN_REPLY_TO,
|
||||
headers.LIST_UNSUBSCRIBE,
|
||||
headers.LIST_UNSUBSCRIBE_POST,
|
||||
] + headers.MIME_HEADERS
|
||||
if user.include_header_email_header:
|
||||
headers_to_keep.append(headers.AUTHENTICATION_RESULTS)
|
||||
delete_all_headers_except(msg, headers_to_keep)
|
||||
|
||||
if mailbox.generic_subject:
|
||||
LOG.d("Use a generic subject for %s", mailbox)
|
||||
orig_subject = msg[headers.SUBJECT]
|
||||
orig_subject = get_header_unicode(orig_subject)
|
||||
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
|
||||
sender = msg[headers.FROM]
|
||||
sender = get_header_unicode(sender)
|
||||
msg = add_header(
|
||||
msg,
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with "{orig_subject}" as subject""",
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
|
||||
)
|
||||
|
||||
# create PGP email if needed
|
||||
if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
|
||||
LOG.d("Encrypt message using mailbox %s", mailbox)
|
||||
if mailbox.generic_subject:
|
||||
LOG.d("Use a generic subject for %s", mailbox)
|
||||
orig_subject = msg[headers.SUBJECT]
|
||||
orig_subject = get_header_unicode(orig_subject)
|
||||
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
|
||||
msg = add_header(
|
||||
msg,
|
||||
f"""Forwarded by SimpleLogin to {alias.email} with "{orig_subject}" as subject""",
|
||||
f"""Forwarded by SimpleLogin to {alias.email} with <b>{orig_subject}</b> as subject""",
|
||||
)
|
||||
|
||||
try:
|
||||
msg = prepare_pgp_message(
|
||||
@ -865,6 +918,11 @@ def forward_email_to_mailbox(
|
||||
msg[headers.SL_EMAIL_LOG_ID] = str(email_log.id)
|
||||
if user.include_header_email_header:
|
||||
msg[headers.SL_ENVELOPE_FROM] = envelope.mail_from
|
||||
if contact.name:
|
||||
original_from = f"{contact.name} <{contact.website_email}>"
|
||||
else:
|
||||
original_from = contact.website_email
|
||||
msg[headers.SL_ORIGINAL_FROM] = original_from
|
||||
# when an alias isn't in the To: header, there's no way for users to know what alias has received the email
|
||||
msg[headers.SL_ENVELOPE_TO] = alias.email
|
||||
|
||||
@ -913,10 +971,11 @@ def forward_email_to_mailbox(
|
||||
envelope.rcpt_options,
|
||||
)
|
||||
|
||||
contact_domain = get_email_domain_part(contact.reply_email)
|
||||
try:
|
||||
sl_sendmail(
|
||||
# use a different envelope sender for each forward (aka VERP)
|
||||
generate_verp_email(VerpType.bounce_forward, email_log.id),
|
||||
generate_verp_email(VerpType.bounce_forward, email_log.id, contact_domain),
|
||||
mailbox.email,
|
||||
msg,
|
||||
envelope.mail_options,
|
||||
@ -985,10 +1044,14 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
|
||||
reply_email = rcpt_to
|
||||
|
||||
# reply_email must end with EMAIL_DOMAIN
|
||||
reply_domain = get_email_domain_part(reply_email)
|
||||
|
||||
# reply_email must end with EMAIL_DOMAIN or a domain that can be used as reverse alias domain
|
||||
if not reply_email.endswith(EMAIL_DOMAIN):
|
||||
LOG.w(f"Reply email {reply_email} has wrong domain")
|
||||
return False, status.E501
|
||||
sl_domain: SLDomain = SLDomain.get_by(domain=reply_domain)
|
||||
if sl_domain is None:
|
||||
LOG.w(f"Reply email {reply_email} has wrong domain")
|
||||
return False, status.E501
|
||||
|
||||
# handle case where reply email is generated with non-allowed char
|
||||
reply_email = normalize_reply_email(reply_email)
|
||||
@ -997,10 +1060,13 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
if not contact:
|
||||
LOG.w(f"No contact with {reply_email} as reverse alias")
|
||||
return False, status.E502
|
||||
if not contact.user.is_active():
|
||||
LOG.w(f"User {contact.user} has been soft deleted")
|
||||
return False, status.E502
|
||||
|
||||
alias = contact.alias
|
||||
alias_address: str = contact.alias.email
|
||||
alias_domain = alias_address[alias_address.find("@") + 1 :]
|
||||
alias_domain = get_email_domain_part(alias_address)
|
||||
|
||||
# Sanity check: verify alias domain is managed by SimpleLogin
|
||||
# scenario: a user have removed a domain but due to a bug, the aliases are still there
|
||||
@ -1011,13 +1077,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
user = alias.user
|
||||
mail_from = envelope.mail_from
|
||||
|
||||
if user.disabled:
|
||||
LOG.e(
|
||||
"User %s disabled, disable sending emails from %s to %s",
|
||||
user,
|
||||
alias,
|
||||
contact,
|
||||
)
|
||||
if not user.can_send_or_receive():
|
||||
LOG.i(f"User {user} cannot send emails")
|
||||
return False, status.E504
|
||||
|
||||
# Check if we need to reject or quarantine based on dmarc
|
||||
@ -1143,7 +1204,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
)
|
||||
|
||||
# replace reverse alias by real address for all contacts
|
||||
for (reply_email, website_email) in contact_query.values(
|
||||
for reply_email, website_email in contact_query.values(
|
||||
Contact.reply_email, Contact.website_email
|
||||
):
|
||||
msg = replace(msg, reply_email, website_email)
|
||||
@ -1198,7 +1259,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
|
||||
# no need to replace TO header
|
||||
LOG.d("email is sent in BCC mode")
|
||||
del msg[headers.TO]
|
||||
else:
|
||||
replace_header_when_reply(msg, alias, headers.TO)
|
||||
|
||||
@ -1831,24 +1891,30 @@ def handle_transactional_bounce(
|
||||
envelope: Envelope, msg, rcpt_to, transactional_id=None
|
||||
):
|
||||
LOG.d("handle transactional bounce sent to %s", rcpt_to)
|
||||
if transactional_id is None:
|
||||
LOG.i(
|
||||
f"No transactional record for {envelope.mail_from} -> {envelope.rcpt_tos}"
|
||||
)
|
||||
return
|
||||
|
||||
# parse the TransactionalEmail
|
||||
transactional_id = transactional_id or parse_id_from_bounce(rcpt_to)
|
||||
transactional = TransactionalEmail.get(transactional_id)
|
||||
|
||||
# a transaction might have been deleted in delete_logs()
|
||||
if transactional:
|
||||
LOG.i("Create bounce for %s", transactional.email)
|
||||
bounce_info = get_mailbox_bounce_info(msg)
|
||||
if bounce_info:
|
||||
Bounce.create(
|
||||
email=transactional.email,
|
||||
info=bounce_info.as_bytes().decode(),
|
||||
commit=True,
|
||||
)
|
||||
else:
|
||||
LOG.w("cannot get bounce info, debug at %s", save_email_for_debugging(msg))
|
||||
Bounce.create(email=transactional.email, commit=True)
|
||||
if not transactional:
|
||||
LOG.i(
|
||||
f"No transactional record for {envelope.mail_from} -> {envelope.rcpt_tos}"
|
||||
)
|
||||
return
|
||||
LOG.i("Create bounce for %s", transactional.email)
|
||||
bounce_info = get_mailbox_bounce_info(msg)
|
||||
if bounce_info:
|
||||
Bounce.create(
|
||||
email=transactional.email,
|
||||
info=bounce_info.as_bytes().decode(),
|
||||
commit=True,
|
||||
)
|
||||
else:
|
||||
LOG.w("cannot get bounce info, debug at %s", save_email_for_debugging(msg))
|
||||
Bounce.create(email=transactional.email, commit=True)
|
||||
|
||||
|
||||
def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
|
||||
@ -1869,6 +1935,9 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
|
||||
contact,
|
||||
alias,
|
||||
)
|
||||
if not email_log.user.is_active():
|
||||
LOG.d(f"User {email_log.user} is not active")
|
||||
return status.E510
|
||||
|
||||
if email_log.is_reply:
|
||||
content_type = msg.get_content_type().lower()
|
||||
@ -1899,7 +1968,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
|
||||
for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
|
||||
res.append((is_delivered, smtp_status))
|
||||
|
||||
for (is_success, smtp_status) in res:
|
||||
for is_success, smtp_status in res:
|
||||
# Consider all deliveries successful if 1 delivery is successful
|
||||
if is_success:
|
||||
return smtp_status
|
||||
@ -1930,6 +1999,9 @@ def send_no_reply_response(mail_from: str, msg: Message):
|
||||
if not mailbox:
|
||||
LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
|
||||
return
|
||||
if not mailbox.user.is_active():
|
||||
LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
|
||||
return
|
||||
send_email_at_most_times(
|
||||
mailbox.user,
|
||||
ALERT_TO_NOREPLY,
|
||||
@ -2219,7 +2291,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
|
||||
if nb_success > 0 and nb_non_success > 0:
|
||||
LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")
|
||||
|
||||
for (is_success, smtp_status) in res:
|
||||
for is_success, smtp_status in res:
|
||||
# Consider all deliveries successful if 1 delivery is successful
|
||||
if is_success:
|
||||
return smtp_status
|
||||
|
@ -42,14 +42,16 @@ def add_sl_domains():
|
||||
LOG.d("%s is already a SL domain", alias_domain)
|
||||
else:
|
||||
LOG.i("Add %s to SL domain", alias_domain)
|
||||
SLDomain.create(domain=alias_domain)
|
||||
SLDomain.create(domain=alias_domain, use_as_reverse_alias=True)
|
||||
|
||||
for premium_domain in PREMIUM_ALIAS_DOMAINS:
|
||||
if SLDomain.get_by(domain=premium_domain):
|
||||
LOG.d("%s is already a SL domain", premium_domain)
|
||||
else:
|
||||
LOG.i("Add %s to SL domain", premium_domain)
|
||||
SLDomain.create(domain=premium_domain, premium_only=True)
|
||||
SLDomain.create(
|
||||
domain=premium_domain, premium_only=True, use_as_reverse_alias=True
|
||||
)
|
||||
|
||||
Session.commit()
|
||||
|
||||
|
@ -192,7 +192,6 @@ amigos
|
||||
amines
|
||||
amnion
|
||||
amoeba
|
||||
amoral
|
||||
amount
|
||||
amours
|
||||
ampere
|
||||
@ -215,7 +214,6 @@ animus
|
||||
anions
|
||||
ankles
|
||||
anklet
|
||||
annals
|
||||
anneal
|
||||
annoys
|
||||
annual
|
||||
@ -364,7 +362,6 @@ auntie
|
||||
aureus
|
||||
aurora
|
||||
author
|
||||
autism
|
||||
autumn
|
||||
avails
|
||||
avatar
|
||||
@ -638,14 +635,12 @@ bigwig
|
||||
bijoux
|
||||
bikers
|
||||
biking
|
||||
bikini
|
||||
bilges
|
||||
bilked
|
||||
bilker
|
||||
billed
|
||||
billet
|
||||
billow
|
||||
bimbos
|
||||
binary
|
||||
binder
|
||||
binged
|
||||
@ -710,8 +705,6 @@ blocks
|
||||
blokes
|
||||
blonde
|
||||
blonds
|
||||
bloods
|
||||
bloody
|
||||
blooms
|
||||
bloops
|
||||
blotch
|
||||
@ -817,8 +810,6 @@ bounds
|
||||
bounty
|
||||
bovine
|
||||
bovver
|
||||
bowels
|
||||
bowers
|
||||
bowing
|
||||
bowled
|
||||
bowleg
|
||||
@ -827,10 +818,8 @@ bowman
|
||||
bowmen
|
||||
bowwow
|
||||
boxcar
|
||||
boxers
|
||||
boxier
|
||||
boxing
|
||||
boyish
|
||||
braced
|
||||
bracer
|
||||
braces
|
||||
@ -861,7 +850,6 @@ breach
|
||||
breads
|
||||
breaks
|
||||
breams
|
||||
breast
|
||||
breath
|
||||
breech
|
||||
breeds
|
||||
@ -872,9 +860,6 @@ brevet
|
||||
brewed
|
||||
brewer
|
||||
briars
|
||||
bribed
|
||||
briber
|
||||
bribes
|
||||
bricks
|
||||
bridal
|
||||
brides
|
||||
@ -926,13 +911,7 @@ buffed
|
||||
buffer
|
||||
buffet
|
||||
bugged
|
||||
bugger
|
||||
bugled
|
||||
bugler
|
||||
bugles
|
||||
builds
|
||||
bulged
|
||||
bulges
|
||||
bulked
|
||||
bulled
|
||||
bullet
|
||||
@ -1340,8 +1319,6 @@ clingy
|
||||
clinic
|
||||
clinks
|
||||
clique
|
||||
cloaca
|
||||
cloaks
|
||||
cloche
|
||||
clocks
|
||||
clomps
|
||||
@ -1448,7 +1425,6 @@ comply
|
||||
compos
|
||||
conchs
|
||||
concur
|
||||
condom
|
||||
condor
|
||||
condos
|
||||
coneys
|
||||
@ -1568,8 +1544,6 @@ cranes
|
||||
cranks
|
||||
cranky
|
||||
cranny
|
||||
crapes
|
||||
crappy
|
||||
crated
|
||||
crater
|
||||
crates
|
||||
@ -1585,7 +1559,6 @@ crazes
|
||||
creaks
|
||||
creaky
|
||||
creams
|
||||
creamy
|
||||
crease
|
||||
create
|
||||
creche
|
||||
@ -1594,8 +1567,6 @@ credos
|
||||
creeds
|
||||
creeks
|
||||
creels
|
||||
creeps
|
||||
creepy
|
||||
cremes
|
||||
creole
|
||||
crepes
|
||||
@ -1728,9 +1699,6 @@ dainty
|
||||
daises
|
||||
damage
|
||||
damask
|
||||
dammed
|
||||
dammit
|
||||
damned
|
||||
damped
|
||||
dampen
|
||||
damper
|
||||
@ -1754,7 +1722,6 @@ darers
|
||||
daring
|
||||
darken
|
||||
darker
|
||||
darkie
|
||||
darkly
|
||||
darned
|
||||
darner
|
||||
@ -1763,8 +1730,6 @@ darter
|
||||
dashed
|
||||
dasher
|
||||
dashes
|
||||
daters
|
||||
dating
|
||||
dative
|
||||
daubed
|
||||
dauber
|
||||
@ -1921,7 +1886,6 @@ dharma
|
||||
dhotis
|
||||
diadem
|
||||
dialog
|
||||
diaper
|
||||
diatom
|
||||
dibble
|
||||
dicier
|
||||
@ -1943,7 +1907,6 @@ digits
|
||||
diking
|
||||
diktat
|
||||
dilate
|
||||
dildos
|
||||
dilute
|
||||
dimity
|
||||
dimmed
|
||||
@ -2058,7 +2021,6 @@ dotted
|
||||
double
|
||||
doubly
|
||||
doubts
|
||||
douche
|
||||
doughy
|
||||
dourer
|
||||
dourly
|
||||
@ -2139,15 +2101,6 @@ duenna
|
||||
duffed
|
||||
duffer
|
||||
dugout
|
||||
dulcet
|
||||
dulled
|
||||
duller
|
||||
dumber
|
||||
dumbly
|
||||
dumbos
|
||||
dumdum
|
||||
dumped
|
||||
dumper
|
||||
dunces
|
||||
dunged
|
||||
dunked
|
||||
@ -2285,7 +2238,6 @@ endows
|
||||
endued
|
||||
endues
|
||||
endure
|
||||
enemas
|
||||
energy
|
||||
enfold
|
||||
engage
|
||||
@ -2333,7 +2285,6 @@ erects
|
||||
ermine
|
||||
eroded
|
||||
erodes
|
||||
erotic
|
||||
errand
|
||||
errant
|
||||
errata
|
||||
@ -2344,7 +2295,6 @@ eructs
|
||||
erupts
|
||||
escape
|
||||
eschew
|
||||
escort
|
||||
escrow
|
||||
escudo
|
||||
espied
|
||||
@ -2363,7 +2313,6 @@ ethnic
|
||||
etudes
|
||||
euchre
|
||||
eulogy
|
||||
eunuch
|
||||
eureka
|
||||
evaded
|
||||
evader
|
||||
@ -2392,7 +2341,6 @@ exempt
|
||||
exerts
|
||||
exeunt
|
||||
exhale
|
||||
exhort
|
||||
exhume
|
||||
exiled
|
||||
exiles
|
||||
@ -2415,7 +2363,6 @@ extant
|
||||
extend
|
||||
extent
|
||||
extols
|
||||
extort
|
||||
extras
|
||||
exuded
|
||||
exudes
|
||||
@ -2440,7 +2387,6 @@ faeces
|
||||
faerie
|
||||
faffed
|
||||
fagged
|
||||
faggot
|
||||
failed
|
||||
faille
|
||||
fainer
|
||||
@ -2473,18 +2419,10 @@ faring
|
||||
farmed
|
||||
farmer
|
||||
farrow
|
||||
farted
|
||||
fascia
|
||||
fasted
|
||||
fasten
|
||||
faster
|
||||
father
|
||||
fathom
|
||||
fating
|
||||
fatsos
|
||||
fatten
|
||||
fatter
|
||||
fatwas
|
||||
faucet
|
||||
faults
|
||||
faulty
|
||||
@ -2532,7 +2470,6 @@ fesses
|
||||
festal
|
||||
fester
|
||||
feting
|
||||
fetish
|
||||
fetter
|
||||
fettle
|
||||
feudal
|
||||
@ -2617,9 +2554,7 @@ flaked
|
||||
flakes
|
||||
flambe
|
||||
flamed
|
||||
flamer
|
||||
flames
|
||||
flange
|
||||
flanks
|
||||
flared
|
||||
flares
|
||||
@ -2754,8 +2689,6 @@ franks
|
||||
frappe
|
||||
frauds
|
||||
frayed
|
||||
freaks
|
||||
freaky
|
||||
freely
|
||||
freest
|
||||
freeze
|
||||
@ -2795,8 +2728,6 @@ fryers
|
||||
frying
|
||||
ftpers
|
||||
ftping
|
||||
fucked
|
||||
fucker
|
||||
fuddle
|
||||
fudged
|
||||
fudges
|
||||
@ -2891,10 +2822,7 @@ gasbag
|
||||
gashed
|
||||
gashes
|
||||
gasket
|
||||
gasman
|
||||
gasmen
|
||||
gasped
|
||||
gassed
|
||||
gasses
|
||||
gateau
|
||||
gather
|
||||
@ -3104,7 +3032,6 @@ grimed
|
||||
grimes
|
||||
grimly
|
||||
grinds
|
||||
gringo
|
||||
griped
|
||||
griper
|
||||
gripes
|
||||
@ -3186,8 +3113,6 @@ gypsum
|
||||
gyrate
|
||||
gyving
|
||||
habits
|
||||
hacked
|
||||
hacker
|
||||
hackle
|
||||
hadith
|
||||
haggis
|
||||
@ -3195,8 +3120,6 @@ haggle
|
||||
hailed
|
||||
hairdo
|
||||
haired
|
||||
hajjes
|
||||
hajjis
|
||||
halest
|
||||
haling
|
||||
halite
|
||||
@ -3223,11 +3146,8 @@ happen
|
||||
haptic
|
||||
harass
|
||||
harden
|
||||
harder
|
||||
hardly
|
||||
harems
|
||||
haring
|
||||
harked
|
||||
harlot
|
||||
harmed
|
||||
harped
|
||||
@ -3407,7 +3327,6 @@ hoofed
|
||||
hoofer
|
||||
hookah
|
||||
hooked
|
||||
hooker
|
||||
hookup
|
||||
hooped
|
||||
hoopla
|
||||
@ -3459,8 +3378,6 @@ huffed
|
||||
hugely
|
||||
hugest
|
||||
hugged
|
||||
hulled
|
||||
huller
|
||||
humane
|
||||
humans
|
||||
humble
|
||||
@ -3667,8 +3584,6 @@ jacket
|
||||
jading
|
||||
jagged
|
||||
jaguar
|
||||
jailed
|
||||
jailer
|
||||
jalopy
|
||||
jammed
|
||||
jangle
|
||||
@ -3689,8 +3604,6 @@ jejune
|
||||
jelled
|
||||
jellos
|
||||
jennet
|
||||
jerked
|
||||
jerkin
|
||||
jersey
|
||||
jested
|
||||
jester
|
||||
@ -3814,11 +3727,7 @@ kidded
|
||||
kidder
|
||||
kiddie
|
||||
kiddos
|
||||
kidnap
|
||||
kidney
|
||||
killed
|
||||
killer
|
||||
kilned
|
||||
kilted
|
||||
kilter
|
||||
kimono
|
||||
@ -3827,15 +3736,11 @@ kinder
|
||||
kindle
|
||||
kindly
|
||||
kingly
|
||||
kinked
|
||||
kiosks
|
||||
kipped
|
||||
kipper
|
||||
kirsch
|
||||
kismet
|
||||
kissed
|
||||
kisser
|
||||
kisses
|
||||
kiting
|
||||
kitsch
|
||||
kitted
|
||||
@ -3847,10 +3752,6 @@ kluges
|
||||
klutzy
|
||||
knacks
|
||||
knaves
|
||||
kneads
|
||||
kneels
|
||||
knells
|
||||
knifed
|
||||
knifes
|
||||
knight
|
||||
knives
|
||||
@ -4210,8 +4111,6 @@ lunges
|
||||
lupine
|
||||
lupins
|
||||
luring
|
||||
lurked
|
||||
lurker
|
||||
lusher
|
||||
lushes
|
||||
lushly
|
||||
@ -4608,7 +4507,6 @@ muggle
|
||||
mukluk
|
||||
mulcts
|
||||
mulish
|
||||
mullah
|
||||
mulled
|
||||
mullet
|
||||
mumble
|
||||
@ -4721,9 +4619,6 @@ nickel
|
||||
nicker
|
||||
nickle
|
||||
nieces
|
||||
niggas
|
||||
niggaz
|
||||
nigger
|
||||
niggle
|
||||
nigher
|
||||
nights
|
||||
@ -4736,7 +4631,6 @@ ninjas
|
||||
ninths
|
||||
nipped
|
||||
nipper
|
||||
nipple
|
||||
nitric
|
||||
nitwit
|
||||
nixing
|
||||
@ -4781,15 +4675,6 @@ nozzle
|
||||
nuance
|
||||
nubbin
|
||||
nubile
|
||||
nuclei
|
||||
nudest
|
||||
nudged
|
||||
nudges
|
||||
nudism
|
||||
nudist
|
||||
nudity
|
||||
nugget
|
||||
nuking
|
||||
numbed
|
||||
number
|
||||
numbly
|
||||
@ -4804,7 +4689,6 @@ nutter
|
||||
nuzzle
|
||||
nybble
|
||||
nylons
|
||||
nympho
|
||||
nymphs
|
||||
oafish
|
||||
oaring
|
||||
@ -4885,7 +4769,6 @@ opting
|
||||
option
|
||||
opuses
|
||||
oracle
|
||||
orally
|
||||
orange
|
||||
orated
|
||||
orates
|
||||
@ -4897,7 +4780,6 @@ ordeal
|
||||
orders
|
||||
ordure
|
||||
organs
|
||||
orgasm
|
||||
orgies
|
||||
oriels
|
||||
orient
|
||||
@ -4993,10 +4875,6 @@ pander
|
||||
panels
|
||||
panics
|
||||
panned
|
||||
panted
|
||||
pantie
|
||||
pantos
|
||||
pantry
|
||||
papacy
|
||||
papaya
|
||||
papers
|
||||
@ -5078,7 +4956,6 @@ pebble
|
||||
pebbly
|
||||
pecans
|
||||
pecked
|
||||
pecker
|
||||
pectic
|
||||
pectin
|
||||
pedalo
|
||||
@ -5151,9 +5028,6 @@ phenom
|
||||
phials
|
||||
phlegm
|
||||
phloem
|
||||
phobia
|
||||
phobic
|
||||
phoebe
|
||||
phoned
|
||||
phones
|
||||
phoney
|
||||
@ -5228,9 +5102,6 @@ piques
|
||||
piracy
|
||||
pirate
|
||||
pirogi
|
||||
pissed
|
||||
pisser
|
||||
pisses
|
||||
pistes
|
||||
pistil
|
||||
pistol
|
||||
@ -5311,8 +5182,6 @@ pogrom
|
||||
points
|
||||
pointy
|
||||
poised
|
||||
poises
|
||||
poison
|
||||
pokers
|
||||
pokeys
|
||||
pokier
|
||||
@ -5422,7 +5291,6 @@ preyed
|
||||
priced
|
||||
prices
|
||||
pricey
|
||||
pricks
|
||||
prided
|
||||
prides
|
||||
priers
|
||||
@ -5602,14 +5470,9 @@ rabbit
|
||||
rabble
|
||||
rabies
|
||||
raceme
|
||||
racers
|
||||
racial
|
||||
racier
|
||||
racily
|
||||
racing
|
||||
racism
|
||||
racist
|
||||
racked
|
||||
racket
|
||||
radars
|
||||
radial
|
||||
@ -5661,8 +5524,6 @@ rapers
|
||||
rapids
|
||||
rapier
|
||||
rapine
|
||||
raping
|
||||
rapist
|
||||
rapped
|
||||
rappel
|
||||
rapper
|
||||
@ -5747,7 +5608,6 @@ recoup
|
||||
rectal
|
||||
rector
|
||||
rectos
|
||||
rectum
|
||||
recurs
|
||||
recuse
|
||||
redact
|
||||
@ -5891,7 +5751,6 @@ resume
|
||||
retail
|
||||
retain
|
||||
retake
|
||||
retard
|
||||
retell
|
||||
retest
|
||||
retied
|
||||
@ -6125,8 +5984,6 @@ sadden
|
||||
sadder
|
||||
saddle
|
||||
sadhus
|
||||
sadism
|
||||
sadist
|
||||
safari
|
||||
safely
|
||||
safest
|
||||
@ -6364,16 +6221,6 @@ severs
|
||||
sewage
|
||||
sewers
|
||||
sewing
|
||||
sexier
|
||||
sexily
|
||||
sexing
|
||||
sexism
|
||||
sexist
|
||||
sexpot
|
||||
sextet
|
||||
sexton
|
||||
sexual
|
||||
shabby
|
||||
shacks
|
||||
shaded
|
||||
shades
|
||||
@ -6383,10 +6230,7 @@ shaggy
|
||||
shaken
|
||||
shaker
|
||||
shakes
|
||||
shalom
|
||||
shaman
|
||||
shamed
|
||||
shames
|
||||
shandy
|
||||
shanks
|
||||
shanty
|
||||
@ -6432,7 +6276,6 @@ shirks
|
||||
shirrs
|
||||
shirts
|
||||
shirty
|
||||
shitty
|
||||
shiver
|
||||
shoals
|
||||
shoats
|
||||
@ -6575,9 +6418,6 @@ slangy
|
||||
slants
|
||||
slated
|
||||
slates
|
||||
slaved
|
||||
slaver
|
||||
slaves
|
||||
slayed
|
||||
slayer
|
||||
sleaze
|
||||
@ -6672,7 +6512,6 @@ snarks
|
||||
snarky
|
||||
snarls
|
||||
snarly
|
||||
snatch
|
||||
snazzy
|
||||
sneaks
|
||||
sneaky
|
||||
@ -6716,7 +6555,6 @@ socket
|
||||
sodded
|
||||
sodden
|
||||
sodium
|
||||
sodomy
|
||||
soever
|
||||
soften
|
||||
softer
|
||||
@ -7468,7 +7306,6 @@ torrid
|
||||
torsos
|
||||
tortes
|
||||
tossed
|
||||
tosser
|
||||
tosses
|
||||
tossup
|
||||
totals
|
||||
@ -7686,7 +7523,6 @@ unhook
|
||||
unhurt
|
||||
unions
|
||||
unique
|
||||
unisex
|
||||
unison
|
||||
united
|
||||
unites
|
||||
@ -7793,7 +7629,6 @@ vacant
|
||||
vacate
|
||||
vacuum
|
||||
vagary
|
||||
vagina
|
||||
vaguer
|
||||
vainer
|
||||
vainly
|
||||
@ -7930,9 +7765,6 @@ votive
|
||||
vowels
|
||||
vowing
|
||||
voyage
|
||||
voyeur
|
||||
vulgar
|
||||
vulvae
|
||||
wabbit
|
||||
wacker
|
||||
wackos
|
||||
@ -7975,7 +7807,6 @@ wander
|
||||
wangle
|
||||
waning
|
||||
wanked
|
||||
wanker
|
||||
wanner
|
||||
wanted
|
||||
wanton
|
||||
|
332515 app/local_data/words.txt
File diff suppressed because it is too large
31 app/migrations/versions/2023_040318_5f4a5625da66_.py Normal file
@ -0,0 +1,31 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 5f4a5625da66
|
||||
Revises: 2c2093c82bc0
|
||||
Create Date: 2023-04-03 18:30:46.488231
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5f4a5625da66'
|
||||
down_revision = '2c2093c82bc0'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('public_domain', sa.Column('partner_id', sa.Integer(), nullable=True))
|
||||
op.create_foreign_key(None, 'public_domain', 'partner', ['partner_id'], ['id'], ondelete='cascade')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(None, 'public_domain', type_='foreignkey')
|
||||
op.drop_column('public_domain', 'partner_id')
|
||||
# ### end Alembic commands ###
|
29 app/migrations/versions/2023_041418_893c0d18475f_.py Normal file
@ -0,0 +1,29 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 893c0d18475f
|
||||
Revises: 5f4a5625da66
|
||||
Create Date: 2023-04-14 18:20:03.807367
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '893c0d18475f'
|
||||
down_revision = '5f4a5625da66'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_contact_pgp_finger_print'), 'contact', ['pgp_finger_print'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_contact_pgp_finger_print'), table_name='contact')
|
||||
# ### end Alembic commands ###
|
35 app/migrations/versions/2023_041419_bc496c0a0279_.py Normal file
@ -0,0 +1,35 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: bc496c0a0279
|
||||
Revises: 893c0d18475f
|
||||
Create Date: 2023-04-14 19:09:38.540514
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'bc496c0a0279'
|
||||
down_revision = '893c0d18475f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_alias_used_on_alias_id'), 'alias_used_on', ['alias_id'], unique=False)
|
||||
op.create_index(op.f('ix_client_user_alias_id'), 'client_user', ['alias_id'], unique=False)
|
||||
op.create_index(op.f('ix_hibp_notified_alias_alias_id'), 'hibp_notified_alias', ['alias_id'], unique=False)
|
||||
op.create_index(op.f('ix_users_newsletter_alias_id'), 'users', ['newsletter_alias_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_users_newsletter_alias_id'), table_name='users')
|
||||
op.drop_index(op.f('ix_hibp_notified_alias_alias_id'), table_name='hibp_notified_alias')
|
||||
op.drop_index(op.f('ix_client_user_alias_id'), table_name='client_user')
|
||||
op.drop_index(op.f('ix_alias_used_on_alias_id'), table_name='alias_used_on')
|
||||
# ### end Alembic commands ###
|
29 app/migrations/versions/2023_041520_2d89315ac650_.py Normal file
@ -0,0 +1,29 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 2d89315ac650
|
||||
Revises: bc496c0a0279
|
||||
Create Date: 2023-04-15 20:43:44.218020
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2d89315ac650'
|
||||
down_revision = 'bc496c0a0279'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_partner_subscription_end_at'), 'partner_subscription', ['end_at'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_partner_subscription_end_at'), table_name='partner_subscription')
|
||||
# ### end Alembic commands ###
|
29 app/migrations/versions/2023_041916_01e2997e90d3_.py Normal file
@ -0,0 +1,29 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 01e2997e90d3
|
||||
Revises: 893c0d18475f
|
||||
Create Date: 2023-04-19 16:09:11.851588
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '01e2997e90d3'
|
||||
down_revision = '893c0d18475f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('public_domain', sa.Column('use_as_reverse_alias', sa.Boolean(), server_default='0', nullable=False))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('public_domain', 'use_as_reverse_alias')
|
||||
# ### end Alembic commands ###
|
25 app/migrations/versions/2023_042011_2634b41f54db_.py Normal file
@ -0,0 +1,25 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 2634b41f54db
|
||||
Revises: 01e2997e90d3, 2d89315ac650
|
||||
Create Date: 2023-04-20 11:47:43.048536
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2634b41f54db'
|
||||
down_revision = ('01e2997e90d3', '2d89315ac650')
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
pass
|
||||
|
||||
|
||||
def downgrade():
|
||||
pass
|
Some files were not shown because too many files have changed in this diff.