Compare commits
66 Commits
| SHA1 |
|---|
| 89fad50529 |
| d09b3b992c |
| ef9c09f76e |
| 0fa4b1b7ee |
| 2904d04a2c |
| a5801551d0 |
| 9c2a35193c |
| e47e5a5255 |
| ed37325b32 |
| dd6005ffdf |
| 664cd32f81 |
| 33f0eb6c41 |
| 9fd2fa9a78 |
| 3c77f8af4b |
| 545eeda79b |
| 01dba12ed0 |
| c872d43c3d |
| 3e6867bc17 |
| a829074584 |
| 25834e8f61 |
| a62b43b7c4 |
| 44fda2d94e |
| bc48198bb1 |
| da6e56c4eb |
| 798b58529c |
| 3da6c983e1 |
| 294232a329 |
| fae9d7bc17 |
| d666f5af3f |
| 556fae02d5 |
| fd4c67c3d1 |
| edef254529 |
| 357f0cca57 |
| 8ce90e27f7 |
| 3ecc8d36f9 |
| 14f4829fab |
| 63ac89e952 |
| 8896f00124 |
| d313c94f77 |
| 39fcf2e48f |
| 41a5a65f51 |
| 1d0c7ec4a0 |
| 4de5b8eb6d |
| 0942f5eba3 |
| dae6f64482 |
| e7f0f81d85 |
| e82190f227 |
| 9002bbad09 |
| f51d31f431 |
| c67b97fe32 |
| bd414b1fc7 |
| 0f73a14926 |
| 0ea33ca5f8 |
| 4e178ad676 |
| 24ba25ab6a |
| 78184eeae4 |
| c111fbe8e1 |
| d5981588e4 |
| 6af1c2ccf4 |
| 76664f6e4c |
| f7125618c4 |
| 050cef0e4e |
| 0d557ef875 |
| 6e56ea4489 |
| def0de643b |
| 9e7cb2c7dd |
.drone.yml (deleted, 52 lines)
@@ -1,52 +0,0 @@
kind: pipeline
type: docker
name: build-multiarch-images

platform:
  os: linux
  arch: amd64

steps:
  - name: make-tags
    image: node
    commands:
      - echo -n "${DRONE_TAG}, latest" > .tags

  - name: build
    image: thegeeklab/drone-docker-buildx
    privileged: true
    settings:
      provenance: false
      dockerfile: app/Dockerfile
      context: app
      registry: git.mrmeeb.stream
      username:
        from_secret: docker_username
      password:
        from_secret: docker_password
      repo: git.mrmeeb.stream/mrmeeb/simple-login
      platforms:
        - linux/arm64
        - linux/amd64

  - name: notify
    image: plugins/slack
    when:
      status:
        - success
        - failure
        - killed
    settings:
      webhook:
        from_secret: slack_webhook
      icon_url:
        from_secret: slack_avatar

trigger:
  event:
    include:
      - tag
  ref:
    include:
      - refs/tags/**
.gitea/workflows/build-release-image.yaml (new file, 195 lines)
@@ -0,0 +1,195 @@
name: Build-Release-Image
on:
  push:
    tags:
      - '*'

env:
  CONTAINER_NAME: git.mrmeeb.stream/mrmeeb/simple-login
  TEA_VERSION: 0.9.2

jobs:

  Build-Image:
    runs-on: [ubuntu-docker-latest, "${{ matrix.platform }}"]
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Checkout
        uses: actions/checkout@v2
      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Prepare tags
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: ${{ env.CONTAINER_NAME }}
      #    tags: |
      #      type=pep440,pattern={{version}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: git.mrmeeb.stream
          username: ${{ env.GITHUB_ACTOR }}
          password: ${{ secrets.GTCR_TOKEN }}
      - name: Build and push by digest
        uses: docker/build-push-action@v5
        id: build
        with:
          context: ./app
          platforms: ${{ matrix.platform }}
          provenance: false
          outputs: type=image,name=${{ env.CONTAINER_NAME }},push-by-digest=true,name-canonical=true,push=true
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v3
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Merge-Images:
    runs-on: ubuntu-docker-latest
    needs: [Build-Image]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Get tag
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Download digests
        uses: actions/download-artifact@v3
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      # Not needed currently due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Prepare Docker metadata
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: ${{ env.CONTAINER_NAME }}
      - name: Login to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: git.mrmeeb.stream
          username: ${{ env.GITHUB_ACTOR }}
          password: ${{ secrets.GTCR_TOKEN }}
      - name: Create manifest latest
        working-directory: /tmp/digests
        run: |
          docker manifest create ${{ env.CONTAINER_NAME }}:latest \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:latest ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          docker manifest inspect ${{ env.CONTAINER_NAME }}:latest

          docker manifest push ${{ env.CONTAINER_NAME }}:latest
      - name: Create manifest tagged
        working-directory: /tmp/digests
        run: |
          docker manifest create ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2) \
            --amend ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          #docker manifest annotate --arch amd64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-amd64/* | cut -d / -f 2)
          #docker manifest annotate --arch arm64 --os linux ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }} ${{ env.CONTAINER_NAME }}@sha256:$(ls -p digests-linux-arm64/* | cut -d / -f 2)
          docker manifest inspect ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}

          docker manifest push ${{ env.CONTAINER_NAME }}:${{ env.RELEASE_VERSION }}
      # Disabled due to https://github.com/go-gitea/gitea/issues/29563
      #- name: Create manifest list and push
      #  working-directory: /tmp/digests
      #  run: |
      #    echo $DOCKER_METADATA_OUTPUT_JSON
      #    echo $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
      #    docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
      #      $(printf '${{ env.CONTAINER_NAME }}@sha256:%s ' $(ls -p */* | cut -d / -f 2))
      #- name: Inspect image
      #  run: |
      #    docker buildx imagetools inspect ${{ env.CONTAINER_NAME }}:${{ steps.meta.outputs.version }}
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Build ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Create-Release:
    runs-on: [ubuntu-latest, linux/amd64]
    needs: [Merge-Images]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Get tag
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
      - name: Prepare tea
        run: |
          # Download tea from Gitea release page
          echo "Downloading Tea v${{ env.TEA_VERSION }}" && \
          wget -q -O tea https://gitea.com/gitea/tea/releases/download/v${{ env.TEA_VERSION }}/tea-${{ env.TEA_VERSION }}-linux-amd64 && \
          echo "Downloaded Tea" && \
          chmod +x tea && \
          # Login to Gitea
          echo "Logging in to Gitea using Tea" && \
          ./tea login add --name SimpleLogin --url https://git.mrmeeb.stream --token ${{ secrets.GITHUB_TOKEN }} && \
          echo "Done"
      - name: Make release
        run: |
          echo "Creating release" && \
          ./tea release create --login "SimpleLogin" --repo ${{ env.GITHUB_REPOSITORY }} --tag ${{ env.RELEASE_VERSION }} -t ${{ env.RELEASE_VERSION }} -n "Triggered by release of v${{ env.RELEASE_VERSION }} by the SimpleLogin team. <a href=\"https://github.com/simple-login/app/releases/tag/v${{ env.RELEASE_VERSION }}\" target=\"_blank\">View the changelog</a>" && \
          echo "Done"
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: failure()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}

  Notify:
    runs-on: ubuntu-latest
    needs: [Build-Image, Merge-Images, Create-Release]
    steps:
      - name: Notify
        uses: rjstone/discord-webhook-notify@v1
        if: always()
        with:
          severity: ${{ job.status == 'success' && 'info' || (job.status == 'cancelled' && 'warn' || 'error') }}
          details: Release ${{ job.status == 'success' && 'succeeded' || (job.status == 'cancelled' && 'cancelled' || 'failed') }}!
          webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
          username: Gitea
          avatarUrl: ${{ vars.RUNNER_ICON_URL }}
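The Merge-Images job above recombines the per-architecture digests exported by Build-Image into a single multi-arch tag. As a rough illustration of the same idea outside CI, here is a minimal Python sketch; the digest-directory layout (`digests-linux-amd64/<digest>`, `digests-linux-arm64/<digest>`) mirrors the workflow's artifacts, the registry path is taken from the workflow's `CONTAINER_NAME`, and the script only prints the `docker manifest` commands rather than running them.

```python
import os

# Values mirroring the workflow's env vars; adjust to your own registry and tag.
CONTAINER_NAME = "git.mrmeeb.stream/mrmeeb/simple-login"
TAG = "latest"


def digest_refs(root: str = ".") -> list[str]:
    """Collect image references from the digest files exported by the build job.

    Each per-platform artifact directory (e.g. digests-linux-amd64) holds one
    empty file whose name is that platform's image digest.
    """
    refs = []
    for entry in sorted(os.listdir(root)):
        if not entry.startswith("digests-"):
            continue
        for digest in os.listdir(os.path.join(root, entry)):
            refs.append(f"{CONTAINER_NAME}@sha256:{digest}")
    return refs


if __name__ == "__main__":
    amend_args = " ".join(f"--amend {ref}" for ref in digest_refs())
    # The workflow runs these through `docker manifest`; here we only print them.
    print(f"docker manifest create {CONTAINER_NAME}:{TAG} {amend_args}")
    print(f"docker manifest push {CONTAINER_NAME}:{TAG}")
```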
@@ -14,4 +14,4 @@ venv/
.venv
.coverage
htmlcov
.git/
.git/
app/.github/workflows/main.yml (62 changed lines, vendored)
@@ -1,7 +1,12 @@
name: Test and lint
name: SimpleLogin actions

on:
  push:
    branches:
      - master
    tags:
      - v*
  pull_request:

jobs:
  lint:
@@ -10,35 +15,29 @@ jobs:
      - name: Check out repo
        uses: actions/checkout@v3

      - name: Install poetry
        run: pipx install poetry

      - uses: actions/setup-python@v4
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: '3.10'
          cache: 'poetry'
          # Install a specific version of uv.
          version: "0.5.21"
          enable-cache: true

      - name: Install OS dependencies
        if: ${{ matrix.python-version }} == '3.10'
        run: |
          sudo apt update
          sudo apt install -y libre2-dev libpq-dev

      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction
        if: steps.setup-uv.outputs.cache-hit != 'true'
        run: uv sync --locked --all-extras

      - name: Check formatting & linting
        run: |
          poetry run pre-commit run --all-files
          uv run pre-commit run --all-files


  test:
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 4
      matrix:
        python-version: ["3.10"]

    # service containers to run with `postgres-job`
    services:
@@ -70,23 +69,21 @@
      - name: Check out repo
        uses: actions/checkout@v3

      - name: Install poetry
        run: pipx install poetry

      - uses: actions/setup-python@v4
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'poetry'
          # Install a specific version of uv.
          version: "0.5.21"
          enable-cache: true

      - name: Install OS dependencies
        if: ${{ matrix.python-version }} == '3.10'
        run: |
          sudo apt update
          sudo apt install -y libre2-dev libpq-dev

      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction
        if: steps.setup-uv.outputs.cache-hit != 'true'
        run: uv sync --locked --all-extras


      - name: Start Redis v6
@@ -96,21 +93,21 @@

      - name: Run db migration
        run: |
          CONFIG=tests/test.env poetry run alembic upgrade head
          CONFIG=tests/test.env uv run alembic upgrade head

      - name: Prepare version file
        run: |
          scripts/generate-build-info.sh ${{ github.sha }}
          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
          cat app/build_info.py

      - name: Test with pytest
        run: |
          poetry run pytest
          uv run pytest
        env:
          GITHUB_ACTIONS_TEST: true

      - name: Archive code coverage results
        uses: actions/upload-artifact@v2
        uses: actions/upload-artifact@v4
        with:
          name: code-coverage-report
          path: htmlcov
@@ -139,6 +136,12 @@
        with:
          fetch-depth: 0

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Create Sentry release
        uses: getsentry/action-release@v1
        env:
@@ -151,13 +154,14 @@

      - name: Prepare version file
        run: |
          scripts/generate-build-info.sh ${{ github.sha }}
          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
          cat app/build_info.py

      - name: Build image and publish to Docker Registry
        uses: docker/build-push-action@v3
        with:
          context: .
          platforms: linux/amd64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
app/.gitignore (3 changed lines, vendored)
@@ -11,8 +11,7 @@ db.sqlite-journal
static/upload
venv/
.venv
.python-version
.coverage
htmlcov
adhoc
.env.*
.env.*
@@ -8,7 +8,7 @@ repos:
  - id: check-yaml
  - id: trailing-whitespace
- repo: https://github.com/Riverside-Healthcare/djLint
  rev: v1.3.0
  rev: v1.34.1
  hooks:
    - id: djlint-jinja
      files: '.*\.html'
@@ -21,5 +21,4 @@ repos:
  - id: ruff
    args: [ --fix ]
  # Run the formatter.
  - id: ruff-format

  - id: ruff-format
app/.python-version (new file, 1 line)
@@ -0,0 +1 @@
3.12.8
@@ -20,15 +20,15 @@ SimpleLogin backend consists of 2 main components:
## Install dependencies

The project requires:
- Python 3.7+ and [poetry](https://python-poetry.org/) to manage dependencies
- Python 3.10 and uv to manage dependencies
- Node v10 for front-end.
- Postgres 12+
- Postgres 13+

First, install all dependencies by running the following command.
Feel free to use `virtualenv` or similar tools to isolate development environment.

```bash
poetry install
uv sync
```

On Mac, sometimes you might need to install some other packages via `brew`:
@@ -55,7 +55,7 @@ brew install -s re2 pybind11
We use pre-commit to run all our linting and static analysis checks. Please run

```bash
poetry run pre-commit install
uv run pre-commit install
```

To install it in your development environment.
@@ -68,6 +68,12 @@ For most tests, you will need to have ``redis`` installed and started on your ma
sh scripts/run-test.sh
```

You can also run tests using a local Postgres DB to speed things up. This can be done by

- creating an empty test DB and running the database migration by `dropdb test && createdb test && DB_URI=postgresql://localhost:5432/test alembic upgrade head`

- replacing the `DB_URI` in `test.env` file by `DB_URI=postgresql://localhost:5432/test`

## Run the code locally

Install npm packages
@@ -151,28 +157,28 @@ Here are the small sum-ups of the directory structures and their roles:

## Pull request

The code is formatted using https://github.com/psf/black, to format the code, simply run
The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run

```
poetry run black .
uv run ruff format .
```

The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

```bash
poetry run flake8
uv run flake8
```

For HTML templates, we use `djlint`. Before creating a pull request, please run

```bash
poetry run djlint --check templates
uv run djlint --check templates
```

If some files aren't properly formatted, you can format all files with

```bash
poetry run djlint --reformat .
uv run djlint --reformat .
```

## Test sending email
@@ -209,7 +215,7 @@ python email_handler.py
4) Send a test email

```bash
swaks --to e1@sl.local --from hey@google.com --server 127.0.0.1:20381
swaks --to e1@sl.lan --from hey@google.com --server 127.0.0.1:20381
```

Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you should see the forwarded email.
@@ -217,6 +223,31 @@ Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you sho
## Job runner

Some features require a job handler (such as GDPR data export). To test such feature you need to run the job_runner

```bash
python job_runner.py
```
```

# Setup for Mac

There are several ways to setup Python and manage the project dependencies on Mac. For info we have successfully used this setup on a Mac silicon:

```bash
# we haven't managed to make python 3.12 work
brew install python3.10

# make sure to update the PATH so python, pip point to Python3
# for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile

# Although pipx is the recommended way to install uv,
# install pipx via brew will automatically install python 3.12
# and uv will then use python 3.12
# so we recommend using uv this way instead
curl -sSL https://install.python-uv.org | python3 -

uv install

# activate the virtualenv and you should be good to go!
source .venv/bin/activate

```
@@ -4,43 +4,47 @@ WORKDIR /code
COPY ./static/package*.json /code/static/
RUN cd /code/static && npm ci

# Main image
FROM python:3.10
FROM --platform=linux/amd64 ubuntu:22.04

ARG UV_VERSION="0.5.21"
ARG UV_HASH="e108c300eafae22ad8e6d94519605530f18f8762eb58d2b98a617edfb5d088fc"

# Keeps Python from generating .pyc files in the container
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONDONTWRITEBYTECODE=1
# Turns off buffering for easier container logging
ENV PYTHONUNBUFFERED 1
ENV PYTHONUNBUFFERED=1

# Add poetry to PATH
ENV PATH="${PATH}:/root/.local/bin"

WORKDIR /code

# Copy poetry files
COPY poetry.lock pyproject.toml ./
# Copy dependency files
COPY pyproject.toml uv.lock .python-version ./

# Install and setup poetry
RUN pip install -U pip \
    && apt-get update \
    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
    && curl -sSL https://install.python-poetry.org | python3 - \
    # Remove curl and netcat from the image
    && apt-get purge -y curl netcat-traditional \
    # Run poetry
    && poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root \
    # Clear apt cache \
    && apt-get purge -y libre2-dev cmake ninja-build\
# Install deps
RUN apt-get update \
    && apt-get install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev build-essential pkg-config cmake ninja-build bash clang \
    && curl -sSL "https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-x86_64-unknown-linux-gnu.tar.gz" > uv.tar.gz \
    && echo "${UV_HASH} uv.tar.gz" | sha256sum -c - \
    && tar xf uv.tar.gz -C /tmp/ \
    && mv /tmp/uv-x86_64-unknown-linux-gnu/uv /usr/bin/uv \
    && mv /tmp/uv-x86_64-unknown-linux-gnu/uvx /usr/bin/uvx \
    && rm -rf /tmp/uv* \
    && rm -f uv.tar.gz \
    && uv python install `cat .python-version` \
    && uv sync --locked \
    && apt-get autoremove -y \
    && apt-get purge -y curl netcat-traditional build-essential pkg-config cmake ninja-build python3-dev clang\
    && apt-get autoremove -y \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Copy code
COPY . .

# copy npm packages
COPY --from=npm /code /code

# copy everything else into /code
COPY . .

ENV PATH="/code/.venv/bin:$PATH"
EXPOSE 7777

#gunicorn wsgi:app -b 0.0.0.0:7777 -w 2 --timeout 15 --log-level DEBUG
@@ -84,7 +84,7 @@ For email gurus, we have chosen 1024 key length instead of 2048 for DNS simplici

### DNS

Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to to force using absolute domain.
Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to force using absolute domain.


#### MX record
@@ -541,7 +541,7 @@ exit

Once you've created all your desired login accounts, add these lines to `/simplelogin.env` to disable further registrations:

```
```.env
DISABLE_REGISTRATION=1
DISABLE_ONBOARDING=true
```
@@ -7,8 +7,4 @@ If you want be up to date on security patches, make sure your SimpleLogin image

## Reporting a Vulnerability

If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
We will review the potential threat and fix it as fast as we can.

We are incredibly thankful for people who disclose vulnerabilities, unfortunately we do not have a bounty program in place yet.

If you want to report a vulnerability, please take a look at our bug bounty program at https://proton.me/security/bug-bounty.
@@ -3,12 +3,17 @@ from dataclasses import dataclass
from enum import Enum
from typing import Optional

import sqlalchemy.exc
from arrow import Arrow
from newrelic import agent
from psycopg2.errors import UniqueViolation
from sqlalchemy import or_

from app.db import Session
from app.email_utils import send_welcome_email
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserPlanChanged, EventContent
from app.partner_user_utils import create_partner_user, create_partner_subscription
from app.utils import sanitize_email, canonicalize_email
from app.errors import (
    AccountAlreadyLinkedToAnotherPartnerException,
@@ -23,12 +28,14 @@ from app.models import (
    User,
    Alias,
)
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string


class SLPlanType(Enum):
    Free = 1
    Premium = 2
    PremiumLifetime = 3


@dataclass
@@ -52,8 +59,26 @@ class LinkResult:
    strategy: str


def send_user_plan_changed_event(
    partner_user: PartnerUser,
) -> UserPlanChanged:
    subscription_end = partner_user.user.get_active_subscription_end(
        include_partner_subscription=False
    )
    if partner_user.user.lifetime:
        event = UserPlanChanged(lifetime=True)
    elif subscription_end:
        event = UserPlanChanged(plan_end_time=subscription_end.timestamp)
    else:
        event = UserPlanChanged(plan_end_time=None)
    EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
    Session.flush()
    return event


def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
    sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
    is_lifetime = plan.type == SLPlanType.PremiumLifetime
    if plan.type == SLPlanType.Free:
        if sub is not None:
            LOG.i(
@@ -62,24 +87,37 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
            PartnerSubscription.delete(sub.id)
            agent.record_custom_event("PlanChange", {"plan": "free"})
    else:
        end_time = plan.expiration
        if plan.type == SLPlanType.PremiumLifetime:
            end_time = None
        if sub is None:
            LOG.i(
                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] with {end_time} / {is_lifetime}"
            )
            PartnerSubscription.create(
                partner_user_id=partner_user.id,
                end_at=plan.expiration,
            create_partner_subscription(
                partner_user=partner_user,
                expiration=end_time,
                lifetime=is_lifetime,
                msg="Upgraded via partner. User did not have a previous partner subscription",
            )
            agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
        else:
            if sub.end_at != plan.expiration:
                LOG.i(
                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
                )
            if sub.end_at != plan.expiration or sub.lifetime != is_lifetime:
                agent.record_custom_event(
                    "PlanChange", {"plan": "premium", "type": "extension"}
                )
                sub.end_at = plan.expiration
                sub.end_at = plan.expiration if not is_lifetime else None
                sub.lifetime = is_lifetime
                LOG.i(
                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] to {sub.end_at} / {sub.lifetime} "
                )
                emit_user_audit_log(
                    user=partner_user.user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended partner subscription",
                )
    Session.flush()
    send_user_plan_changed_event(partner_user)
    Session.commit()

@@ -98,12 +136,13 @@ def ensure_partner_user_exists_for_user(
    if res and res.partner_id != partner.id:
        raise AccountAlreadyLinkedToAnotherPartnerException()
    if not res:
        res = PartnerUser.create(
            user_id=sl_user.id,
        res = create_partner_user(
            user=sl_user,
            partner_id=partner.id,
            partner_email=link_request.email,
            external_user_id=link_request.external_user_id,
        )

        Session.commit()
        LOG.i(
            f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
@@ -131,17 +170,59 @@ class ClientMergeStrategy(ABC):

class NewUserStrategy(ClientMergeStrategy):
    def process(self) -> LinkResult:
        # Will create a new SL User with a random password
        canonical_email = canonicalize_email(self.link_request.email)
        new_user = User.create(
            email=canonical_email,
            name=self.link_request.name,
            password=random_string(20),
            activated=True,
            from_partner=self.link_request.from_partner,
        try:
            # Will create a new SL User with a random password
            new_user = User.create(
                email=canonical_email,
                name=self.link_request.name,
                password=random_string(20),
                activated=True,
                from_partner=self.link_request.from_partner,
            )
            self.create_partner_user(new_user)
            Session.commit()

            if not new_user.created_by_partner:
                send_welcome_email(new_user)

            agent.record_custom_event(
                "PartnerUserCreation", {"partner": self.partner.name}
            )

            return LinkResult(
                user=new_user,
                strategy=self.__class__.__name__,
            )
        except (UniqueViolation, sqlalchemy.exc.IntegrityError) as e:
            Session.rollback()
            LOG.debug(f"Got the duplicate user error: {e}")
            return self.create_missing_link(canonical_email)

    def create_missing_link(self, canonical_email: str):
        # If there's a unique key violation due to race conditions try to create only the partner if needed
        partner_user = PartnerUser.get_by(
            external_user_id=self.link_request.external_user_id,
            partner_id=self.partner.id,
        )
        partner_user = PartnerUser.create(
            user_id=new_user.id,
        if partner_user is None:
            # Get the user by canonical email and if not by normal email
            user = User.get_by(email=canonical_email) or User.get_by(
                email=self.link_request.email
            )
            if not user:
                raise RuntimeError(
                    "Tried to create only partner on UniqueViolation but cannot find the user"
                )
            partner_user = self.create_partner_user(user)
        Session.commit()
        return LinkResult(
            user=partner_user.user, strategy=ExistingUnlinkedUserStrategy.__name__
        )

    def create_partner_user(self, new_user: User):
        partner_user = create_partner_user(
            user=new_user,
            partner_id=self.partner.id,
            external_user_id=self.link_request.external_user_id,
            partner_email=self.link_request.email,
@@ -153,17 +234,7 @@ class NewUserStrategy(ClientMergeStrategy):
            partner_user,
            self.link_request.plan,
        )
        Session.commit()

        if not new_user.created_by_partner:
            send_welcome_email(new_user)

        agent.record_custom_event("PartnerUserCreation", {"partner": self.partner.name})

        return LinkResult(
            user=new_user,
            strategy=self.__class__.__name__,
        )
        return partner_user


class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
@@ -200,7 +271,7 @@ def get_login_strategy(
    return ExistingUnlinkedUserStrategy(link_request, user, partner)


def check_alias(email: str) -> bool:
def check_alias(email: str):
    alias = Alias.get_by(email=email)
    if alias is not None:
        raise AccountIsUsingAliasAsEmail()
@@ -275,10 +346,26 @@ def switch_already_linked_user(
    LOG.i(
        f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
    )

    emit_user_audit_log(
        user=other_partner_user.user,
        action=UserAuditLogAction.UnlinkAccount,
        message=f"Deleting partner_user {other_partner_user.id} (external_user_id={other_partner_user.external_user_id} | partner_email={other_partner_user.partner_email}) from user {current_user.id}, as we received a new link request for the same partner",
    )
    PartnerUser.delete(other_partner_user.id)
    LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
    # Link this partner_user to the current user
    emit_user_audit_log(
        user=partner_user.user,
        action=UserAuditLogAction.UnlinkAccount,
        message=f"Unlinking from partner, as user will now be tied to another external account. old=(id={partner_user.user.id} | email={partner_user.user.email}) | new=(id={current_user.id} | email={current_user.email})",
    )
    partner_user.user_id = current_user.id
    emit_user_audit_log(
        user=current_user,
        action=UserAuditLogAction.LinkAccount,
        message=f"Linking user {current_user.id} ({current_user.email}) to partner_user:{partner_user.id} (external_user_id={partner_user.external_user_id} | partner_email={partner_user.partner_email})",
    )
    # Set plan
    set_plan_for_partner_user(partner_user, link_request.plan)
    Session.commit()
@@ -1,18 +1,29 @@
from typing import Optional
from __future__ import annotations

from typing import Optional, List

import arrow
import sqlalchemy
from flask_admin.model.template import EndpointLinkRowAction
from markupsafe import Markup

from app import models, s3
from flask import redirect, url_for, request, flash, Response
from flask_admin import BaseView
from flask_admin import expose, AdminIndexView
from flask_admin.actions import action
from flask_admin.contrib import sqla
from flask_admin.form import SecureForm
from flask_admin.model.template import EndpointLinkRowAction
from flask_login import current_user
from markupsafe import Markup

from app import models, s3, config
from app.custom_domain_validation import (
    CustomDomainValidation,
    DomainValidationResult,
    ExpectedValidationRecords,
)
from app.db import Session
from app.dns_utils import get_network_dns_client
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.models import (
    User,
    ManualSubscription,
@@ -27,8 +38,31 @@ from app.models import (
    Alias,
    Newsletter,
    PADDLE_SUBSCRIPTION_GRACE_DAYS,
    Mailbox,
    DeletedAlias,
    DomainDeletedAlias,
    PartnerUser,
    AliasMailbox,
    AliasAuditLog,
    UserAuditLog,
    CustomDomain,
)
from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
from app.proton.proton_unlink import perform_proton_account_unlink
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def _admin_action_formatter(view, context, model, name):
    action_name = AuditLogActionEnum.get_name(model.action)
    return "{} ({})".format(action_name, model.action)


def _admin_date_formatter(view, context, model, name):
    return model.created_at.format()


def _user_upgrade_channel_formatter(view, context, model, name):
    return Markup(model.upgrade_channel)


class SLModelView(sqla.ModelView):
@@ -46,7 +80,8 @@ class SLModelView(sqla.ModelView):

    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        return redirect(url_for("auth.login", next=request.url))
        flash("You don't have access to the admin page", "error")
        return redirect(url_for("dashboard.index", next=request.url))

    def on_model_change(self, form, model, is_created):
        changes = {}
@@ -91,14 +126,11 @@ class SLAdminIndexView(AdminIndexView):
        if not current_user.is_authenticated or not current_user.is_admin:
            return redirect(url_for("auth.login", next=request.url))

        return redirect("/admin/user")


def _user_upgrade_channel_formatter(view, context, model, name):
    return Markup(model.upgrade_channel)
        return redirect(url_for("admin.email_search.index"))


class UserAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["email", "id"]
    column_exclude_list = [
        "salt",
@@ -117,6 +149,8 @@ class UserAdmin(SLModelView):

    column_formatters = {
        "upgrade_channel": _user_upgrade_channel_formatter,
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    @action(
@@ -328,32 +362,69 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
                manual_sub.end_at = manual_sub.end_at.shift(years=1)
            else:
                manual_sub.end_at = arrow.now().shift(years=1, days=1)
            emit_user_audit_log(
                user=user,
                action=UserAuditLogAction.Upgrade,
                message=f"Admin {current_user.email} extended manual subscription to user {user.email}",
            )
            EventDispatcher.send_event(
                user=user,
                content=EventContent(
                    user_plan_change=UserPlanChanged(
                        plan_end_time=manual_sub.end_at.timestamp
                    )
                ),
            )
            flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
            continue
        else:
            emit_user_audit_log(
                user=user,
                action=UserAuditLogAction.Upgrade,
                message=f"Admin {current_user.email} created manual subscription to user {user.email}",
            )
            manual_sub = ManualSubscription.create(
                user_id=user.id,
                end_at=arrow.now().shift(years=1, days=1),
                comment=way,
                is_giveaway=is_giveaway,
            )
            EventDispatcher.send_event(
                user=user,
                content=EventContent(
                    user_plan_change=UserPlanChanged(
                        plan_end_time=manual_sub.end_at.timestamp
                    )
                ),
            )

        ManualSubscription.create(
            user_id=user.id,
            end_at=arrow.now().shift(years=1, days=1),
            comment=way,
            is_giveaway=is_giveaway,
        )

        flash(f"New {way} manual subscription for {user} is created", "success")
            flash(f"New {way} manual subscription for {user} is created", "success")
    Session.commit()


class EmailLogAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id"]
    column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"]

    can_edit = False
    can_create = False

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


class AliasAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.email", "email", "mailbox.email"]
    column_filters = ["id", "user.email", "email", "mailbox.email"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    @action(
        "disable_email_spoofing_check",
        "Disable email spoofing protection",
@@ -376,9 +447,15 @@ class AliasAdmin(SLModelView):


class MailboxAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.email", "email"]
    column_filters = ["id", "user.email", "email"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


# class LifetimeCouponAdmin(SLModelView):
#     can_edit = True
@@ -386,28 +463,33 @@ class MailboxAdmin(SLModelView):


class CouponAdmin(SLModelView):
    form_base_class = SecureForm
    can_edit = False
    can_create = True

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


class ManualSubscriptionAdmin(SLModelView):
    form_base_class = SecureForm
    can_edit = True
    column_searchable_list = ["id", "user.email"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    @action(
        "extend_1y",
        "Extend for 1 year",
        "Extend 1 year more?",
    )
    def extend_1y(self, ids):
        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
            ms.end_at = ms.end_at.shift(years=1)
            flash(f"Extend subscription for 1 year for {ms.user}", "success")
            AdminAuditLog.extend_subscription(
                current_user.id, ms.user.id, ms.end_at, "1 year"
            )

        Session.commit()
        self.__extend_manual_subscription(ids, msg="1 year", years=1)

    @action(
        "extend_1m",
@@ -415,11 +497,26 @@ class ManualSubscriptionAdmin(SLModelView):
        "Extend 1 month more?",
    )
    def extend_1m(self, ids):
        self.__extend_manual_subscription(ids, msg="1 month", months=1)

    def __extend_manual_subscription(self, ids: List[int], msg: str, **kwargs):
        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
            ms.end_at = ms.end_at.shift(months=1)
            flash(f"Extend subscription for 1 month for {ms.user}", "success")
            sub: ManualSubscription = ms
            sub.end_at = sub.end_at.shift(**kwargs)
            flash(f"Extend subscription for {msg} for {sub.user}", "success")
            emit_user_audit_log(
                user=sub.user,
                action=UserAuditLogAction.Upgrade,
                message=f"Admin {current_user.email} extended manual subscription for {msg} for {sub.user}",
            )
            AdminAuditLog.extend_subscription(
                current_user.id, ms.user.id, ms.end_at, "1 month"
                current_user.id, sub.user.id, sub.end_at, msg
            )
            EventDispatcher.send_event(
                user=sub.user,
                content=EventContent(
                    user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
                ),
            )

        Session.commit()
@@ -432,15 +529,27 @@ class ManualSubscriptionAdmin(SLModelView):


class CustomDomainAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["domain", "user.email", "user.id"]
    column_exclude_list = ["ownership_txt_token"]
    can_edit = False

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


class ReferralAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.email", "code", "name"]
    column_filters = ["id", "user.email", "code", "name"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    def scaffold_list_columns(self):
        ret = super().scaffold_list_columns()
        ret.insert(0, "nb_user")
@@ -456,16 +565,8 @@ class ReferralAdmin(SLModelView):
#     can_delete = True


def _admin_action_formatter(view, context, model, name):
    action_name = AuditLogActionEnum.get_name(model.action)
    return "{} ({})".format(action_name, model.action)


def _admin_created_at_formatter(view, context, model, name):
    return model.created_at.format()


class AdminAuditLogAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"]
    column_filters = ["admin.id", "admin.email", "model_id", "created_at"]
    column_exclude_list = ["id"]
@@ -476,7 +577,8 @@ class AdminAuditLogAdmin(SLModelView):

    column_formatters = {
        "action": _admin_action_formatter,
        "created_at": _admin_created_at_formatter,
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


@@ -496,6 +598,7 @@ def _transactionalcomplaint_refused_email_id_formatter(view, context, model, nam


class ProviderComplaintAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.id", "created_at"]
    column_filters = ["user.id", "state"]
    column_hide_backrefs = False
@@ -504,8 +607,8 @@ class ProviderComplaintAdmin(SLModelView):
    can_delete = False

    column_formatters = {
        "created_at": _admin_created_at_formatter,
        "updated_at": _admin_created_at_formatter,
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
        "state": _transactionalcomplaint_state_formatter,
        "phase": _transactionalcomplaint_phase_formatter,
        "refused_email": _transactionalcomplaint_refused_email_id_formatter,
@@ -566,6 +669,7 @@ def _newsletter_html_formatter(view, context, model: Newsletter, name):


class NewsletterAdmin(SLModelView):
    form_base_class = SecureForm
    list_template = "admin/model/newsletter-list.html"
    edit_template = "admin/model/newsletter-edit.html"
    edit_modal = False
@@ -647,6 +751,7 @@ class NewsletterAdmin(SLModelView):


class NewsletterUserAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id"]
    column_filters = ["id", "user.email", "newsletter.subject"]
    column_exclude_list = ["created_at", "updated_at", "id"]
@@ -656,17 +761,303 @@ class NewsletterUserAdmin(SLModelView):


class DailyMetricAdmin(SLModelView):
    form_base_class = SecureForm
    column_exclude_list = ["created_at", "updated_at", "id"]

    can_export = True


class MetricAdmin(SLModelView):
    form_base_class = SecureForm
    column_exclude_list = ["created_at", "updated_at", "id"]

    can_export = True


class InvalidMailboxDomainAdmin(SLModelView):
    form_base_class = SecureForm
    can_create = True
    can_delete = True


class EmailSearchResult:
    def __init__(self):
        self.no_match: bool = True
        self.alias: Optional[Alias] = None
        self.alias_audit_log: Optional[List[AliasAuditLog]] = None
        self.mailbox: List[Mailbox] = []
        self.mailbox_count: int = 0
        self.deleted_alias: Optional[DeletedAlias] = None
        self.deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
        self.domain_deleted_alias: Optional[DomainDeletedAlias] = None
        self.domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
        self.user: Optional[User] = None
        self.user_audit_log: Optional[List[UserAuditLog]] = None
        self.query: str

    @staticmethod
    def from_request_email(email: str) -> EmailSearchResult:
        output = EmailSearchResult()
        output.query = email
        alias = Alias.get_by(email=email)
        if alias:
            output.alias = alias
            output.alias_audit_log = (
                AliasAuditLog.filter_by(alias_id=alias.id)
                .order_by(AliasAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False
        try:
            user_id = int(email)
            user = User.get(user_id)
        except ValueError:
            user = User.get_by(email=email)
        if user:
            output.user = user
            output.user_audit_log = (
                UserAuditLog.filter_by(user_id=user.id)
                .order_by(UserAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False

        user_audit_log = (
            UserAuditLog.filter_by(user_email=email)
            .order_by(UserAuditLog.created_at.desc())
            .all()
        )
        if user_audit_log:
            output.user_audit_log = user_audit_log
            output.no_match = False
        mailboxes = (
            Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
        )
        if mailboxes:
            output.mailbox = mailboxes
            output.mailbox_count = Mailbox.filter_by(email=email).count()
            output.no_match = False
        deleted_alias = DeletedAlias.get_by(email=email)
        if deleted_alias:
            output.deleted_alias = deleted_alias
            output.deleted_alias_audit_log = (
                AliasAuditLog.filter_by(alias_email=deleted_alias.email)
                .order_by(AliasAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False
        domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
        if domain_deleted_alias:
            output.domain_deleted_alias = domain_deleted_alias
            output.domain_deleted_alias_audit_log = (
                AliasAuditLog.filter_by(alias_email=domain_deleted_alias.email)
                .order_by(AliasAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False
        return output


class EmailSearchHelpers:
    @staticmethod
    def mailbox_list(user: User) -> list[Mailbox]:
        return (
            Mailbox.filter_by(user_id=user.id)
            .order_by(Mailbox.id.asc())
            .limit(10)
            .all()
        )

    @staticmethod
    def mailbox_count(user: User) -> int:
        return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()

    @staticmethod
    def alias_mailboxes(alias: Alias) -> list[Mailbox]:
        return (
            Session.query(Mailbox)
            .filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
            .union(
                Session.query(Mailbox)
                .join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
                .filter(AliasMailbox.alias_id == alias.id)
            )
            .order_by(Mailbox.id)
            .limit(10)
            .all()
        )

    @staticmethod
    def alias_mailbox_count(alias: Alias) -> int:
        return len(alias.mailboxes)

    @staticmethod
    def alias_list(user: User) -> list[Alias]:
        return (
            Alias.filter_by(user_id=user.id).order_by(Alias.id.desc()).limit(10).all()
        )

    @staticmethod
    def alias_count(user: User) -> int:
        return Alias.filter_by(user_id=user.id).count()

    @staticmethod
    def partner_user(user: User) -> Optional[PartnerUser]:
        return PartnerUser.get_by(user_id=user.id)


class EmailSearchAdmin(BaseView):
    def is_accessible(self):
        return current_user.is_authenticated and current_user.is_admin

    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        flash("You don't have access to the admin page", "error")
        return redirect(url_for("dashboard.index", next=request.url))

    @expose("/", methods=["GET", "POST"])
    def index(self):
        search = EmailSearchResult()
        email = request.args.get("query")
        if email is not None and len(email) > 0:
            email = email.strip()
            search = EmailSearchResult.from_request_email(email)

        return self.render(
            "admin/email_search.html",
            email=email,
            data=search,
            helper=EmailSearchHelpers,
        )

    @expose("/partner_unlink", methods=["POST"])
    def delete_partner_link(self):
        user_id = request.form.get("user_id")
        if not user_id:
            flash("Missing user_id", "error")
            return redirect(url_for("admin.email_search.index"))
        try:
            user_id = int(user_id)
        except ValueError:
            flash("Missing user_id", "error")
            return redirect(url_for("admin.email_search.index", query=user_id))
        user = User.get(user_id)
        if user is None:
            flash("User not found", "error")
            return redirect(url_for("admin.email_search.index", query=user_id))
        external_user_id = perform_proton_account_unlink(user, skip_check=True)
        if not external_user_id:
            flash("User unlinked", "success")
            return redirect(url_for("admin.email_search.index", query=user_id))

        AdminAuditLog.create(
            admin_user_id=user.id,
            model=User.__class__.__name__,
            model_id=user.id,
            action=AuditLogActionEnum.unlink_user.value,
            data={"external_user_id": external_user_id},
        )
        Session.commit()

        return redirect(url_for("admin.email_search.index", query=user_id))


class CustomDomainWithValidationData:
    def __init__(self, domain: CustomDomain):
        self.domain: CustomDomain = domain
        self.ownership_expected: Optional[ExpectedValidationRecords] = None
        self.ownership_validation: Optional[DomainValidationResult] = None
        self.mx_expected: Optional[dict[int, ExpectedValidationRecords]] = None
        self.mx_validation: Optional[DomainValidationResult] = None
        self.spf_expected: Optional[ExpectedValidationRecords] = None
        self.spf_validation: Optional[DomainValidationResult] = None
        self.dkim_expected: {str: ExpectedValidationRecords} = {}
        self.dkim_validation: {str: str} = {}


class CustomDomainSearchResult:
    def __init__(self):
        self.no_match: bool = False
        self.user: Optional[User] = None
        self.domains: list[CustomDomainWithValidationData] = []

    @staticmethod
    def from_user(user: Optional[User]) -> CustomDomainSearchResult:
        out = CustomDomainSearchResult()
        if user is None:
            out.no_match = True
            return out
        out.user = user
        dns_client = get_network_dns_client()
        validator = CustomDomainValidation(
            dkim_domain=config.EMAIL_DOMAIN,
            partner_domains=config.PARTNER_DNS_CUSTOM_DOMAINS,
            partner_domains_validation_prefixes=config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES,
            dns_client=dns_client,
        )
        for custom_domain in user.custom_domains:
            validation_data = CustomDomainWithValidationData(custom_domain)
            if not custom_domain.ownership_verified:
                validation_data.ownership_expected = (
                    validator.get_ownership_verification_record(custom_domain)
                )
                validation_data.ownership_validation = (
                    validator.validate_domain_ownership(custom_domain)
                )
            if not custom_domain.verified:
                validation_data.mx_expected = validator.get_expected_mx_records(
                    custom_domain
                )
                validation_data.mx_validation = validator.validate_mx_records(
                    custom_domain
                )
            if not custom_domain.spf_verified:
                validation_data.spf_expected = validator.get_expected_spf_record(
                    custom_domain
                )
                validation_data.spf_validation = validator.validate_spf_records(
                    custom_domain
                )
            if not custom_domain.dkim_verified:
                validation_data.dkim_expected = validator.get_dkim_records(
                    custom_domain
                )
                validation_data.dkim_validation = validator.validate_dkim_records(
                    custom_domain
                )
            out.domains.append(validation_data)

        return out


class CustomDomainSearchAdmin(BaseView):
    def is_accessible(self):
        return current_user.is_authenticated and current_user.is_admin

    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        flash("You don't have access to the admin page", "error")
        return redirect(url_for("dashboard.index", next=request.url))

    @expose("/", methods=["GET", "POST"])
    def index(self):
        query = request.args.get("user")
        if query is None:
            search = CustomDomainSearchResult()
        else:
            try:
                user_id = int(query)
                user = User.get_by(id=user_id)
            except ValueError:
                user = User.get_by(email=query)
            if user is None:
                cd = CustomDomain.get_by(domain=query)
                if cd is not None:
                    user = cd.user
            search = CustomDomainSearchResult.from_user(user)

        return self.render(
            "admin/custom_domain_search.html",
            data=search,
            query=query,
        )
38  app/app/alias_audit_log_utils.py  Normal file
@@ -0,0 +1,38 @@
from enum import Enum
from typing import Optional

from app.models import Alias, AliasAuditLog


class AliasAuditLogAction(Enum):
    CreateAlias = "create"
    ChangeAliasStatus = "change_status"
    DeleteAlias = "delete"
    UpdateAlias = "update"

    InitiateTransferAlias = "initiate_transfer_alias"
    AcceptTransferAlias = "accept_transfer_alias"
    TransferredAlias = "transferred_alias"

    ChangedMailboxes = "changed_mailboxes"

    CreateContact = "create_contact"
    UpdateContact = "update_contact"
    DeleteContact = "delete_contact"


def emit_alias_audit_log(
    alias: Alias,
    action: AliasAuditLogAction,
    message: str,
    user_id: Optional[int] = None,
    commit: bool = False,
):
    AliasAuditLog.create(
        user_id=user_id or alias.user_id,
        alias_id=alias.id,
        alias_email=alias.email,
        action=action.value,
        message=message,
        commit=commit,
    )
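For reference, a minimal usage sketch of this new helper as it appears to be intended (the alias id and message below are placeholders, and the snippet assumes the usual app/DB context is initialised):

from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
from app.models import Alias

alias = Alias.get(42)  # placeholder alias id
# Record that the alias was updated and persist the audit row immediately.
emit_alias_audit_log(
    alias=alias,
    action=AliasAuditLogAction.UpdateAlias,
    message="Alias note updated from the dashboard",  # free-form audit message
    commit=True,
)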
62  app/app/alias_mailbox_utils.py  Normal file
@@ -0,0 +1,62 @@
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional

from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.db import Session
from app.models import Alias, AliasMailbox, Mailbox

_MAX_MAILBOXES_PER_ALIAS = 20


class CannotSetMailboxesForAliasCause(Enum):
    Forbidden = "Forbidden"
    EmptyMailboxes = "Must choose at least one mailbox"
    TooManyMailboxes = "Too many mailboxes"


@dataclass
class SetMailboxesForAliasResult:
    performed_change: bool
    reason: Optional[CannotSetMailboxesForAliasCause]


def set_mailboxes_for_alias(
    user_id: int, alias: Alias, mailbox_ids: List[int]
) -> Optional[CannotSetMailboxesForAliasCause]:
    if len(mailbox_ids) == 0:
        return CannotSetMailboxesForAliasCause.EmptyMailboxes
    if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
        return CannotSetMailboxesForAliasCause.TooManyMailboxes

    mailboxes = (
        Session.query(Mailbox)
        .filter(
            Mailbox.id.in_(mailbox_ids),
            Mailbox.user_id == user_id,
            Mailbox.verified == True,  # noqa: E712
        )
        .order_by(Mailbox.id.asc())
        .all()
    )
    if len(mailboxes) != len(mailbox_ids):
        return CannotSetMailboxesForAliasCause.Forbidden

    # first remove all existing alias-mailboxes links
    AliasMailbox.filter_by(alias_id=alias.id).delete()
    Session.flush()

    # then add all new mailboxes, the first one becoming the mailbox associated with the alias
    for i, mailbox in enumerate(mailboxes):
        if i == 0:
            alias.mailbox_id = mailboxes[0].id
        else:
            AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)

    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.ChangedMailboxes,
        message=",".join([f"{mailbox.id} ({mailbox.email})" for mailbox in mailboxes]),
    )

    return None
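A hedged usage sketch of set_mailboxes_for_alias, mirroring how the alias API route later in this diff calls it (the ids are placeholders):

from app.alias_mailbox_utils import set_mailboxes_for_alias
from app.db import Session
from app.models import Alias

alias = Alias.get(42)  # placeholder alias id
err = set_mailboxes_for_alias(user_id=alias.user_id, alias=alias, mailbox_ids=[1, 2])
if err is not None:
    # err.value carries a user-facing message such as "Too many mailboxes"
    print(err.value)
else:
    Session.commit()  # the helper only flushes; the commit is left to the caller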
@@ -58,32 +58,34 @@ def verify_prefix_suffix(

    # alias_domain must be either one of user custom domains or built-in domains
    if alias_domain not in user.available_alias_domains(alias_options=alias_options):
        LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
        LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
        return False

    # SimpleLogin domain case:
    # 1) alias_suffix must start with "." and
    # 2) alias_domain_prefix must come from the word list
    available_sl_domains = [
        sl_domain.domain
        for sl_domain in user.get_sl_domains(alias_options=alias_options)
    ]
    if (
        alias_domain in user.available_sl_domains(alias_options=alias_options)
        alias_domain in available_sl_domains
        and alias_domain not in user_custom_domains
        # when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
        and not config.DISABLE_ALIAS_SUFFIX
    ):
        if not alias_domain_prefix.startswith("."):
            LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
            LOG.i("User %s submits a wrong alias suffix %s", user, alias_suffix)
            return False

    else:
        if alias_domain not in user_custom_domains:
            if not config.DISABLE_ALIAS_SUFFIX:
                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                return False

            if alias_domain not in user.available_sl_domains(
                alias_options=alias_options
            ):
                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
            if alias_domain not in available_sl_domains:
                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                return False

    return True
@@ -1,12 +1,14 @@
import csv
from io import StringIO
import re
from dataclasses import dataclass
from typing import Optional, Tuple

from email_validator import validate_email, EmailNotValidError
from sqlalchemy.exc import IntegrityError, DataError
from flask import make_response

from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
from app.config import (
    BOUNCE_PREFIX_FOR_REPLY_PHASE,
    BOUNCE_PREFIX,
@@ -23,11 +25,20 @@ from app.email_utils import (
    send_cannot_create_domain_alias,
    send_email,
    render,
    sl_formataddr,
)
from app.errors import AliasInTrashError
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import (
    AliasDeleted,
    AliasStatusChanged,
    EventContent,
    AliasCreated,
)
from app.log import LOG
from app.models import (
    Alias,
    AliasDeleteReason,
    CustomDomain,
    Directory,
    User,
@@ -56,12 +67,16 @@ def get_user_if_alias_would_auto_create(
        # Prevent addresses with unicode characters (🤯) in them for now.
        validate_email(address, check_deliverability=False, allow_smtputf8=False)
    except EmailNotValidError:
        LOG.i(f"Not creating alias for {address} because email is invalid")
        return None

    domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
        address, notify_user=notify_user
    )
    if DomainDeletedAlias.get_by(email=address):
        LOG.i(
            f"Not creating alias for {address} because it was previously deleted for this domain"
        )
        return None
    if domain_and_rule:
        return domain_and_rule[0].user
@@ -86,6 +101,9 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
    custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain)

    if not custom_domain:
        LOG.i(
            f"Cannot auto-create custom domain alias for {address} because there's no custom domain for {alias_domain}"
        )
        return None

    user: User = custom_domain.user
@@ -101,6 +119,9 @@ def check_if_alias_can_be_auto_created_for_custom_domain(

    if not custom_domain.catch_all:
        if len(custom_domain.auto_create_rules) == 0:
            LOG.i(
                f"Cannot create alias {address} for domain {custom_domain} because it has no catch-all and no rules"
            )
            return None
        local = get_email_local_part(address)

@@ -114,7 +135,7 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
                )
                return custom_domain, rule
        else:  # no rule passes
            LOG.d("no rule passed to create %s", local)
            LOG.d(f"No rule matches auto-create {address} for domain {custom_domain}")
            return None
    LOG.d("Create alias via catchall")

@@ -141,6 +162,7 @@ def check_if_alias_can_be_auto_created_for_a_directory(
        sep = "#"
    else:
        # if there's no directory separator in the alias, no way to auto-create it
        LOG.info(f"Cannot auto-create {address} since it has no directory separator")
        return None

    directory_name = address[: address.find(sep)]
@@ -148,6 +170,9 @@ def check_if_alias_can_be_auto_created_for_a_directory(

    directory = Directory.get_by(name=directory_name)
    if not directory:
        LOG.info(
            f"Cannot auto-create {address} because there is no directory for {directory_name}"
        )
        return None

    user: User = directory.user
@@ -156,12 +181,17 @@ def check_if_alias_can_be_auto_created_for_a_directory(
        return None

    if not user.can_create_new_alias():
        LOG.d(f"{user} can't create new directory alias {address}")
        LOG.d(
            f"{user} can't create new directory alias {address} because user cannot create aliases"
        )
        if notify_user:
            send_cannot_create_directory_alias(user, address, directory_name)
        return None

    if directory.disabled:
        LOG.d(
            f"{user} can't create new directory alias {address} because directory is disabled"
        )
        if notify_user:
            send_cannot_create_directory_alias_disabled(user, address, directory_name)
        return None
@@ -303,36 +333,56 @@ def try_auto_create_via_domain(address: str) -> Optional[Alias]:
    return None


def delete_alias(alias: Alias, user: User):
def delete_alias(
    alias: Alias,
    user: User,
    reason: AliasDeleteReason = AliasDeleteReason.Unspecified,
    commit: bool = False,
):
    """
    Delete an alias and add it to either global or domain trash
    Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
    """
    # save deleted alias to either global or domain trash
    LOG.i(f"User {user} has deleted alias {alias}")
    # save deleted alias to either global or domain trash
    if alias.custom_domain_id:
        if not DomainDeletedAlias.get_by(
            email=alias.email, domain_id=alias.custom_domain_id
        ):
            LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
            Session.add(
                DomainDeletedAlias(
                    user_id=user.id,
                    email=alias.email,
                    domain_id=alias.custom_domain_id,
                )
            domain_deleted_alias = DomainDeletedAlias(
                user_id=user.id,
                email=alias.email,
                domain_id=alias.custom_domain_id,
                reason=reason,
            )
            Session.add(domain_deleted_alias)
            Session.commit()

            LOG.i(
                f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
            )
    else:
        if not DeletedAlias.get_by(email=alias.email):
            LOG.d("add %s to global trash", alias)
            Session.add(DeletedAlias(email=alias.email))
            deleted_alias = DeletedAlias(email=alias.email, reason=reason)
            Session.add(deleted_alias)
            Session.commit()
            LOG.i(f"Moving {alias} to global trash {deleted_alias}")

    LOG.i("delete alias %s", alias)
    alias_id = alias.id
    alias_email = alias.email

    emit_alias_audit_log(
        alias, AliasAuditLogAction.DeleteAlias, "Alias deleted by user action"
    )
    Alias.filter(Alias.id == alias.id).delete()
    Session.commit()

    EventDispatcher.send_event(
        user,
        EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
    )
    if commit:
        Session.commit()


def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
    """
@@ -405,7 +455,7 @@ def alias_export_csv(user, csv_direct_export=False):
    return output


def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
    # cannot transfer alias which is used for receiving newsletter
    if User.get_by(newsletter_alias_id=alias.id):
        raise Exception("Cannot transfer alias that's used to receive newsletter")
@@ -442,10 +492,12 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
        f"Alias {alias.email} has been received",
        render(
            "transactional/alias-transferred.txt",
            user=old_user,
            alias=alias,
        ),
        render(
            "transactional/alias-transferred.html",
            user=old_user,
            alias=alias,
        ),
    )
@@ -457,4 +509,90 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
    alias.disable_pgp = False
    alias.pinned = False

    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.TransferredAlias,
        message=f"Lost ownership of alias due to alias transfer confirmed. New owner is {new_user.id}",
        user_id=old_user.id,
    )
    EventDispatcher.send_event(
        old_user,
        EventContent(
            alias_deleted=AliasDeleted(
                id=alias.id,
                email=alias.email,
            )
        ),
    )

    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.AcceptTransferAlias,
        message=f"Accepted alias transfer from user {old_user.id}",
        user_id=new_user.id,
    )
    EventDispatcher.send_event(
        new_user,
        EventContent(
            alias_created=AliasCreated(
                id=alias.id,
                email=alias.email,
                note=alias.note,
                enabled=alias.enabled,
                created_at=int(alias.created_at.timestamp),
            )
        ),
    )

    Session.commit()


def change_alias_status(
    alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
):
    LOG.i(f"Changing alias {alias} enabled to {enabled}")
    alias.enabled = enabled

    event = AliasStatusChanged(
        id=alias.id,
        email=alias.email,
        enabled=enabled,
        created_at=int(alias.created_at.timestamp),
    )
    EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
    audit_log_message = f"Set alias status to {enabled}"
    if message is not None:
        audit_log_message += f". {message}"
    emit_alias_audit_log(
        alias, AliasAuditLogAction.ChangeAliasStatus, audit_log_message
    )

    if commit:
        Session.commit()


@dataclass
class AliasRecipientName:
    name: str
    message: Optional[str] = None


def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
    """
    Logic:
    1. If alias has name, use it
    2. If alias has custom domain, and custom domain has name, use it
    3. Otherwise, use the alias email as the recipient
    """
    if alias.name:
        return AliasRecipientName(
            name=sl_formataddr((alias.name, alias.email)),
            message=f"Put alias name {alias.name} in from header",
        )
    elif alias.custom_domain:
        if alias.custom_domain.name:
            return AliasRecipientName(
                name=sl_formataddr((alias.custom_domain.name, alias.email)),
                message=f"Put domain default alias name {alias.custom_domain.name} in from header",
            )
    return AliasRecipientName(name=alias.email)
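A small usage sketch of the new change_alias_status helper, in the way the API route further down calls it (the alias lookup is a placeholder):

from app import alias_utils
from app.models import Alias

alias = Alias.get(42)  # placeholder alias id
# Disable the alias, record an audit-log entry and dispatch the status-change event;
# leaving commit=False defers the transaction to the caller, as the API route does.
alias_utils.change_alias_status(alias, enabled=False, message="Disabled via script")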
@@ -19,6 +19,9 @@ def authorize_request() -> Optional[Tuple[str, int]]:

    if not api_key:
        if current_user.is_authenticated:
            # if current_user.is_authenticated and request.headers.get(
            #     constants.HEADER_ALLOW_API_COOKIES
            # ):
            g.user = current_user
        else:
            return jsonify(error="Wrong api key"), 401
@@ -191,15 +191,8 @@ def get_alias_infos_with_pagination_v3(
        q = q.order_by(Alias.email.desc())
    else:
        # default sorting
        latest_activity = case(
            [
                (Alias.created_at > EmailLog.created_at, Alias.created_at),
                (Alias.created_at < EmailLog.created_at, EmailLog.created_at),
            ],
            else_=Alias.created_at,
        )
        q = q.order_by(Alias.pinned.desc())
        q = q.order_by(latest_activity.desc())
        q = q.order_by(func.greatest(Alias.created_at, EmailLog.created_at).desc())

    q = q.limit(page_limit).offset(page_id * page_size)
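The hunk above swaps a hand-rolled case() expression for SQL's GREATEST. A hedged standalone sketch of the same ordering idea (the model and column names here are invented for illustration, not SimpleLogin's):

from sqlalchemy import Column, DateTime, Integer, func, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Item(Base):  # illustrative model only
    __tablename__ = "item"
    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime)
    last_event_at = Column(DateTime)


# ORDER BY GREATEST(created_at, last_event_at) DESC picks whichever timestamp is newer,
# which is what the removed case() expression computed by hand.
stmt = select(Item).order_by(func.greatest(Item.created_at, Item.last_event_at).desc())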
@@ -1,9 +1,13 @@
from typing import Optional

from deprecated import deprecated
from flask import g
from flask import jsonify
from flask import request

from app import alias_utils
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_mailbox_utils import set_mailboxes_for_alias
from app.api.base import api_bp, require_api_auth
from app.api.serializer import (
    AliasInfo,
@@ -25,7 +29,8 @@ from app.errors import (
    ErrAddressInvalid,
)
from app.extensions import limiter
from app.models import Alias, Contact, Mailbox, AliasMailbox
from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasDeleteReason


@deprecated
@@ -160,7 +165,7 @@ def delete_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    alias_utils.delete_alias(alias, user)
    alias_utils.delete_alias(alias, user, AliasDeleteReason.ManualAction)

    return jsonify(deleted=True), 200

@@ -184,7 +189,12 @@ def toggle_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    alias.enabled = not alias.enabled
    alias_utils.change_alias_status(
        alias,
        enabled=not alias.enabled,
        message=f"Set enabled={not alias.enabled} via API",
    )
    LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
    Session.commit()

    return jsonify(enabled=alias.enabled), 200
@@ -270,10 +280,12 @@ def update_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    changed_fields = []
    changed = False
    if "note" in data:
        new_note = data.get("note")
        alias.note = new_note
        changed_fields.append("note")
        changed = True

    if "mailbox_id" in data:
@@ -283,35 +295,22 @@ def update_alias(alias_id):
            return jsonify(error="Forbidden"), 400

        alias.mailbox_id = mailbox_id
        changed_fields.append(f"mailbox_id ({mailbox_id})")
        changed = True

    if "mailbox_ids" in data:
        mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
        mailboxes: [Mailbox] = []

        # check if all mailboxes belong to user
        for mailbox_id in mailbox_ids:
            mailbox = Mailbox.get(mailbox_id)
            if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
                return jsonify(error="Forbidden"), 400
            mailboxes.append(mailbox)

        if not mailboxes:
            return jsonify(error="Must choose at least one mailbox"), 400

        # <<< update alias mailboxes >>>
        # first remove all existing alias-mailboxes links
        AliasMailbox.filter_by(alias_id=alias.id).delete()
        Session.flush()

        # then add all new mailboxes
        for i, mailbox in enumerate(mailboxes):
            if i == 0:
                alias.mailbox_id = mailboxes[0].id
            else:
                AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
        # <<< END update alias mailboxes >>>
        try:
            mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
        except ValueError:
            return jsonify(error="Invalid mailbox_id"), 400
        err = set_mailboxes_for_alias(
            user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
        )
        if err:
            return jsonify(error=err.value), 400

        mailbox_ids_string = ",".join(map(str, mailbox_ids))
        changed_fields.append(f"mailbox_ids ({mailbox_ids_string})")
        changed = True

    if "name" in data:
@@ -323,17 +322,26 @@ def update_alias(alias_id):
        if new_name:
            new_name = new_name.replace("\n", "")
        alias.name = new_name
        changed_fields.append("name")
        changed = True

    if "disable_pgp" in data:
        alias.disable_pgp = data.get("disable_pgp")
        changed_fields.append("disable_pgp")
        changed = True

    if "pinned" in data:
        alias.pinned = data.get("pinned")
        changed_fields.append("pinned")
        changed = True

    if changed:
        changed_fields_string = ",".join(changed_fields)
        emit_alias_audit_log(
            alias,
            AliasAuditLogAction.UpdateAlias,
            f"Alias fields updated ({changed_fields_string})",
        )
        Session.commit()

    return jsonify(ok=True), 200
@@ -414,15 +422,14 @@ def create_contact_route(alias_id):
    if not data:
        return jsonify(error="request body cannot be empty"), 400

    alias: Alias = Alias.get(alias_id)

    if alias.user_id != g.user.id:
    alias: Optional[Alias] = Alias.get_by(id=alias_id, user_id=g.user.id)
    if not alias:
        return jsonify(error="Forbidden"), 403

    contact_address = data.get("contact")

    try:
        contact = create_contact(g.user, alias, contact_address)
        contact = create_contact(alias, contact_address)
    except ErrContactErrorUpgradeNeeded as err:
        return jsonify(error=err.error_for_user()), 403
    except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
@@ -444,11 +451,16 @@ def delete_contact(contact_id):
        200
    """
    user = g.user
    contact = Contact.get(contact_id)
    contact: Optional[Contact] = Contact.get(contact_id)

    if not contact or contact.alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    emit_alias_audit_log(
        alias=contact.alias,
        action=AliasAuditLogAction.DeleteContact,
        message=f"Deleted contact {contact_id} ({contact.email})",
    )
    Contact.delete(contact_id)
    Session.commit()

@@ -466,12 +478,17 @@ def toggle_contact(contact_id):
        200
    """
    user = g.user
    contact = Contact.get(contact_id)
    contact: Optional[Contact] = Contact.get(contact_id)

    if not contact or contact.alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    contact.block_forward = not contact.block_forward
    emit_alias_audit_log(
        alias=contact.alias,
        action=AliasAuditLogAction.UpdateContact,
        message=f"Set contact state {contact.id} {contact.email} -> {contact.website_email} to blocked {contact.block_forward}",
    )
    Session.commit()

    return jsonify(block_forward=contact.block_forward), 200
@@ -1,7 +1,6 @@
import secrets
import string

import facebook
import google.oauth2.credentials
import googleapiclient.discovery
from flask import jsonify, request
@@ -11,7 +10,7 @@ from itsdangerous import Signer
from app import email_utils
from app.api.base import api_bp
from app.config import FLASK_SECRET, DISABLE_REGISTRATION
from app.dashboard.views.setting import send_reset_password_email
from app.dashboard.views.account_setting import send_reset_password_email
from app.db import Session
from app.email_utils import (
    email_can_be_used_as_mailbox,
@@ -23,6 +22,7 @@ from app.events.auth_event import LoginEvent, RegisterEvent
from app.extensions import limiter
from app.log import LOG
from app.models import User, ApiKey, SocialAuth, AccountActivation
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email, canonicalize_email


@@ -52,8 +52,12 @@ def auth_login():
    password = data.get("password")
    device = data.get("device")

    email = sanitize_email(data.get("email"))
    canonical_email = canonicalize_email(data.get("email"))
    email = data.get("email")
    if not email:
        LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
        return jsonify(error="Email or password incorrect"), 400
    email = sanitize_email(email)
    canonical_email = canonicalize_email(email)

    user = User.get_by(email=email) or User.get_by(email=canonical_email)

@@ -129,8 +133,8 @@ def auth_register():
    send_email(
        email,
        "Just one more step to join SimpleLogin",
        render("transactional/code-activation.txt.jinja2", code=code),
        render("transactional/code-activation.html", code=code),
        render("transactional/code-activation.txt.jinja2", user=user, code=code),
        render("transactional/code-activation.html", user=user, code=code),
    )

    RegisterEvent(RegisterEvent.ActionType.success, RegisterEvent.Source.api).send()
@@ -183,6 +187,11 @@ def auth_activate():

    LOG.d("activate user %s", user)
    user.activated = True
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.ActivateUser,
        message=f"User has been activated: {user.email}",
    )
    AccountActivation.delete(account_activation.id)
    Session.commit()

@@ -226,8 +235,8 @@ def auth_reactivate():
    send_email(
        email,
        "Just one more step to join SimpleLogin",
        render("transactional/code-activation.txt.jinja2", code=code),
        render("transactional/code-activation.html", code=code),
        render("transactional/code-activation.txt.jinja2", user=user, code=code),
        render("transactional/code-activation.html", user=user, code=code),
    )

    return jsonify(msg="User needs to confirm their account"), 200
@@ -251,6 +260,8 @@ def auth_facebook():
    }

    """
    import facebook

    data = request.get_json()
    if not data:
        return jsonify(error="request body cannot be empty"), 400
@@ -2,8 +2,10 @@ from flask import g, request
from flask import jsonify

from app.api.base import api_bp, require_api_auth
from app.custom_domain_utils import set_custom_domain_mailboxes
from app.db import Session
from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
from app.log import LOG
from app.models import CustomDomain, DomainDeletedAlias


def custom_domain_to_dict(custom_domain: CustomDomain):
@@ -100,23 +102,14 @@ def update_custom_domain(custom_domain_id):

    if "mailbox_ids" in data:
        mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
        if mailbox_ids:
            # check if mailbox is not tempered with
            mailboxes = []
            for mailbox_id in mailbox_ids:
                mailbox = Mailbox.get(mailbox_id)
                if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
                    return jsonify(error="Forbidden"), 400
                mailboxes.append(mailbox)

            # first remove all existing domain-mailboxes links
            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
            Session.flush()

            for mailbox in mailboxes:
                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)

        result = set_custom_domain_mailboxes(user.id, custom_domain, mailbox_ids)
        if result.success:
            changed = True
        else:
            LOG.info(
                f"Prevented from updating mailboxes [custom_domain_id={custom_domain.id}]: {result.reason.value}"
            )
            return jsonify(error="Forbidden"), 400

    if changed:
        Session.commit()
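A hedged sketch of how the new set_custom_domain_mailboxes helper is consumed, mirroring the route above (the ids are placeholders, and only the result.success / result.reason fields used above are assumed):

from app.custom_domain_utils import set_custom_domain_mailboxes
from app.db import Session
from app.models import CustomDomain

custom_domain = CustomDomain.get(7)  # placeholder domain id
result = set_custom_domain_mailboxes(custom_domain.user_id, custom_domain, [1, 2])
if result.success:
    Session.commit()
else:
    # result.reason.value carries the rejection cause, as logged by the API route
    print(result.reason.value)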
@@ -1,22 +1,13 @@
from smtplib import SMTPRecipientsRefused

import arrow
from flask import g
from flask import jsonify
from flask import request

from app import mailbox_utils
from app.api.base import api_bp, require_api_auth
from app.config import JOB_DELETE_MAILBOX
from app.dashboard.views.mailbox import send_verification_email
from app.dashboard.views.mailbox_detail import verify_mailbox_change
from app.db import Session
from app.email_utils import (
    mailbox_already_used,
    email_can_be_used_as_mailbox,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.models import Mailbox
from app.utils import sanitize_email


@@ -42,33 +33,21 @@ def create_mailbox():
        the new mailbox dict
    """
    user = g.user
    mailbox_email = sanitize_email(request.get_json().get("email"))
    email = request.get_json().get("email")
    if not email:
        return jsonify(error="Invalid email"), 400

    if not user.is_premium():
        return jsonify(error="Only premium plan can add additional mailbox"), 400
    mailbox_email = sanitize_email(email)

    if not is_valid_email(mailbox_email):
        return jsonify(error=f"{mailbox_email} invalid"), 400
    elif mailbox_already_used(mailbox_email, user):
        return jsonify(error=f"{mailbox_email} already used"), 400
    elif not email_can_be_used_as_mailbox(mailbox_email):
        return (
            jsonify(
                error=f"{mailbox_email} cannot be used. Please note a mailbox cannot "
                f"be a disposable email address"
            ),
            400,
        )
    else:
        new_mailbox = Mailbox.create(email=mailbox_email, user_id=user.id)
        Session.commit()
    try:
        new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
    except mailbox_utils.MailboxError as e:
        return jsonify(error=e.msg), 400

        send_verification_email(user, new_mailbox)

        return (
            jsonify(mailbox_to_dict(new_mailbox)),
            201,
        )
    return (
        jsonify(mailbox_to_dict(new_mailbox)),
        201,
    )


@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["DELETE"])
@@ -86,47 +65,17 @@ def delete_mailbox(mailbox_id):

    """
    user = g.user
    mailbox = Mailbox.get(mailbox_id)

    if not mailbox or mailbox.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    if mailbox.id == user.default_mailbox_id:
        return jsonify(error="You cannot delete the default mailbox"), 400

    data = request.get_json() or {}
    transfer_mailbox_id = data.get("transfer_aliases_to")
    if transfer_mailbox_id and int(transfer_mailbox_id) >= 0:
        transfer_mailbox = Mailbox.get(transfer_mailbox_id)
        transfer_mailbox_id = int(transfer_mailbox_id)
    else:
        transfer_mailbox_id = None

        if not transfer_mailbox or transfer_mailbox.user_id != user.id:
            return (
                jsonify(error="You must transfer the aliases to a mailbox you own."),
                403,
            )

        if transfer_mailbox_id == mailbox_id:
            return (
                jsonify(
                    error="You can not transfer the aliases to the mailbox you want to delete."
                ),
                400,
            )

        if not transfer_mailbox.verified:
            return jsonify(error="Your new mailbox is not verified"), 400

    # Schedule delete account job
    LOG.w("schedule delete mailbox job for %s", mailbox)
    Job.create(
        name=JOB_DELETE_MAILBOX,
        payload={
            "mailbox_id": mailbox.id,
            "transfer_mailbox_id": transfer_mailbox_id,
        },
        run_at=arrow.now(),
        commit=True,
    )
    try:
        mailbox_utils.delete_mailbox(user, mailbox_id, transfer_mailbox_id)
    except mailbox_utils.MailboxError as e:
        return jsonify(error=e.msg), 400

    return jsonify(deleted=True), 200

@@ -168,20 +117,10 @@ def update_mailbox(mailbox_id):

    if "email" in data:
        new_email = sanitize_email(data.get("email"))

        if mailbox_already_used(new_email, user):
            return jsonify(error=f"{new_email} already used"), 400
        elif not email_can_be_used_as_mailbox(new_email):
            return (
                jsonify(
                    error=f"{new_email} cannot be used. Please note a mailbox cannot "
                    f"be a disposable email address"
                ),
                400,
            )

        try:
            verify_mailbox_change(user, mailbox, new_email)
            mailbox_utils.request_mailbox_email_change(user, mailbox, new_email)
        except mailbox_utils.MailboxError as e:
            return jsonify(error=e.msg), 400
        except SMTPRecipientsRefused:
            return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
        else:
@@ -191,7 +130,7 @@ def update_mailbox(mailbox_id):
    if "cancel_email_change" in data:
        cancel_email_change = data.get("cancel_email_change")
        if cancel_email_change:
            mailbox.new_email = None
            mailbox_utils.cancel_email_change(mailbox.id, user)
            changed = True

    if changed:
@@ -1,3 +1,4 @@
from email_validator import EmailNotValidError
from flask import g
from flask import jsonify, request

@@ -61,8 +62,17 @@ def new_custom_alias_v2():
    if not data:
        return jsonify(error="request body cannot be empty"), 400

    alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
    signed_suffix = data.get("signed_suffix", "").strip()
    alias_prefix = data.get("alias_prefix", "")
    if not isinstance(alias_prefix, str) or not alias_prefix:
        return jsonify(error="invalid value for alias_prefix"), 400

    alias_prefix = alias_prefix.strip().lower().replace(" ", "")
    signed_suffix = data.get("signed_suffix", "")
    if not isinstance(signed_suffix, str) or not signed_suffix:
        return jsonify(error="invalid value for signed_suffix"), 400

    signed_suffix = signed_suffix.strip()

    note = data.get("note")
    alias_prefix = convert_to_id(alias_prefix)

@@ -93,12 +103,15 @@ def new_custom_alias_v2():
            400,
        )

    alias = Alias.create(
        user_id=user.id,
        email=full_alias,
        mailbox_id=user.default_mailbox_id,
        note=note,
    )
    try:
        alias = Alias.create(
            user_id=user.id,
            email=full_alias,
            mailbox_id=user.default_mailbox_id,
            note=note,
        )
    except EmailNotValidError:
        return jsonify(error="Email is not valid"), 400

    Session.commit()

@@ -153,8 +166,17 @@ def new_custom_alias_v3():
    if not isinstance(data, dict):
        return jsonify(error="request body does not follow the required format"), 400

    alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
    alias_prefix_data = data.get("alias_prefix", "") or ""

    if not isinstance(alias_prefix_data, str):
        return jsonify(error="request body does not follow the required format"), 400

    alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
    signed_suffix = data.get("signed_suffix", "") or ""

    if not isinstance(signed_suffix, str):
        return jsonify(error="request body does not follow the required format"), 400

    signed_suffix = signed_suffix.strip()

    mailbox_ids = data.get("mailbox_ids")
@@ -12,7 +12,7 @@ from app.models import (
    SenderFormatEnum,
    AliasSuffixEnum,
)
from app.proton.utils import perform_proton_account_unlink
from app.proton.proton_unlink import perform_proton_account_unlink


def setting_to_dict(user: User):
@@ -144,5 +144,6 @@ def get_available_domains_for_random_alias_v2():
@require_api_auth
def unlink_proton_account():
    user = g.user
    perform_proton_account_unlink(user)
    if not perform_proton_account_unlink(user):
        return jsonify(error="The account cannot be unlinked"), 400
    return jsonify({"ok": True})
@@ -2,10 +2,11 @@ from flask import jsonify, g
from sqlalchemy_utils.types.arrow import arrow

from app.api.base import api_bp, require_api_sudo, require_api_auth
from app import config
from app.constants import JobType
from app.extensions import limiter
from app.log import LOG
from app.models import Job, ApiToCookieToken
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


@api_bp.route("/user", methods=["DELETE"])
@@ -16,9 +17,14 @@ def delete_user():

    """
    # Schedule delete account job
    emit_user_audit_log(
        user=g.user,
        action=UserAuditLogAction.UserMarkedForDeletion,
        message=f"Marked user {g.user.id} ({g.user.email}) for deletion from API",
    )
    LOG.w("schedule delete account job for %s", g.user)
    Job.create(
        name=config.JOB_DELETE_ACCOUNT,
        name=JobType.DELETE_ACCOUNT.value,
        payload={"user_id": g.user.id},
        run_at=arrow.now(),
        commit=True,
@@ -38,6 +44,8 @@ def get_api_session_token():
        token: "asdli3ldq39h9hd3",
    }
    """
    if not g.api_key:
        return jsonify(ok=False), 401
    token = ApiToCookieToken.create(
        user=g.user,
        api_key_id=g.api_key.id,
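The string job names that this release removes from config.py (see the config.py diff further down) are centralised in the new JobType enum in app/app/constants.py. A minimal sketch of scheduling a job the new way, following the delete_user route above (the user id is a placeholder):

import arrow

from app.constants import JobType
from app.models import Job

# Schedule an account-deletion job, mirroring delete_user() above.
Job.create(
    name=JobType.DELETE_ACCOUNT.value,
    payload={"user_id": 123},  # placeholder user id
    run_at=arrow.now(),
    commit=True,
)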
@@ -10,8 +10,9 @@ from app.api.base import api_bp, require_api_auth
from app.config import SESSION_COOKIE_NAME
from app.dashboard.views.index import get_stats
from app.db import Session
from app.image_validation import detect_image_format, ImageFormat
from app.models import ApiKey, File, PartnerUser, User
from app.proton.utils import get_proton_partner
from app.proton.proton_partner import get_proton_partner
from app.session import logout_session
from app.utils import random_string

@@ -78,17 +79,18 @@ def update_user_info():
    data = request.get_json() or {}

    if "profile_picture" in data:
        if data["profile_picture"] is None:
            if user.profile_picture_id:
                file = user.profile_picture
                user.profile_picture_id = None
        if user.profile_picture_id:
            file = user.profile_picture
            user.profile_picture_id = None
            Session.flush()
            if file:
                File.delete(file.id)
                s3.delete(file.path)
                Session.flush()
                if file:
                    File.delete(file.id)
                    s3.delete(file.path)
                    Session.flush()
        else:
        if data["profile_picture"] is not None:
            raw_data = base64.decodebytes(data["profile_picture"].encode())
            if detect_image_format(raw_data) == ImageFormat.Unknown:
                return jsonify(error="Unsupported image format"), 400
            file_path = random_string(30)
            file = File.create(user_id=user.id, path=file_path)
            Session.flush()
@@ -16,6 +16,7 @@ from .views import (
    social,
    recovery,
    api_to_cookie,
    oidc,
)

__all__ = [
@@ -36,4 +37,5 @@ __all__ = [
    "social",
    "recovery",
    "api_to_cookie",
    "oidc",
]
@@ -7,6 +7,7 @@ from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import ActivationCode
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_next_url


@@ -47,6 +48,11 @@ def activate():

    user = activation_code.user
    user.activated = True
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.ActivateUser,
        message=f"User has been activated: {user.email}",
    )
    login_user(user)

    # activation code is to be used only once
@@ -3,10 +3,13 @@ from flask_login import login_user

from app.auth.base import auth_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import EmailChange, ResetPasswordCode


@auth_bp.route("/change_email", methods=["GET", "POST"])
@limiter.limit("3/hour")
def change_email():
    code = request.args.get("code")

@@ -22,12 +25,14 @@ def change_email():
        return render_template("auth/change_email.html")

    user = email_change.user
    old_email = user.email
    user.email = email_change.new_email

    EmailChange.delete(email_change.id)
    ResetPasswordCode.filter_by(user_id=user.id).delete()
    Session.commit()

    LOG.i(f"User {user} has changed their email from {old_email} to {user.email}")
    flash("Your new email has been updated", "success")

    login_user(user)
@@ -3,7 +3,7 @@ from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app.auth.base import auth_bp
from app.dashboard.views.setting import send_reset_password_email
from app.dashboard.views.account_setting import send_reset_password_email
from app.extensions import limiter
from app.log import LOG
from app.models import User
@@ -5,11 +5,12 @@ from wtforms import StringField, validators

from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import CONNECT_WITH_PROTON
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON, OIDC_CLIENT_ID
from app.events.auth_event import LoginEvent
from app.extensions import limiter
from app.log import LOG
from app.models import User
from app.pw_models import PasswordOracle
from app.utils import sanitize_email, sanitize_next_url, canonicalize_email


@@ -43,6 +44,13 @@ def login():
        user = User.get_by(email=email) or User.get_by(email=canonical_email)

        if not user or not user.check_password(form.password.data):
            if not user:
                # Do the hash to avoid timing attacks nevertheless
                dummy_pw = PasswordOracle()
                dummy_pw.password = (
                    "$2b$12$ZWqpL73h4rGNfLkJohAFAu0isqSw/bX9p/tzpbWRz/To5FAftaW8u"
                )
                dummy_pw.check_password(form.password.data)
            # Trigger rate limiter
            g.deduct_limit = True
            form.password.data = None
@@ -77,4 +85,6 @@ def login():
        next_url=next_url,
        show_resend_activation=show_resend_activation,
        connect_with_proton=CONNECT_WITH_PROTON,
        connect_with_oidc=OIDC_CLIENT_ID is not None,
        connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
    )
135  app/app/auth/views/oidc.py  Normal file
@@ -0,0 +1,135 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session

import requests

from app import config
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import (
    URL,
    OIDC_SCOPES,
    OIDC_NAME_FIELD,
)
from app.db import Session
from app.email_utils import send_welcome_email
from app.log import LOG
from app.models import User, SocialAuth
from app.utils import sanitize_email, sanitize_next_url


# need to set explicitly redirect_uri instead of leaving the lib to pre-fill redirect_uri
# when served behind nginx, the redirect_uri is localhost... and not the real url
redirect_uri = URL + "/auth/oidc/callback"

SESSION_STATE_KEY = "oauth_state"
SESSION_NEXT_KEY = "oauth_redirect_next"


@auth_bp.route("/oidc/login")
def oidc_login():
    if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
        return redirect(url_for("auth.login"))

    next_url = sanitize_next_url(request.args.get("next"))

    auth_url = requests.get(config.OIDC_WELL_KNOWN_URL).json()["authorization_endpoint"]

    oidc = OAuth2Session(
        config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri
    )
    authorization_url, state = oidc.authorization_url(auth_url)

    # State is used to prevent CSRF, keep this for later.
    session[SESSION_STATE_KEY] = state
    session[SESSION_NEXT_KEY] = next_url
    return redirect(authorization_url)


@auth_bp.route("/oidc/callback")
def oidc_callback():
    if SESSION_STATE_KEY not in session:
        flash("Invalid state, please retry", "error")
        return redirect(url_for("auth.login"))
    if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
        return redirect(url_for("auth.login"))

    # user clicks on cancel
    if "error" in request.args:
        flash("Please use another sign in method then", "warning")
        return redirect("/")

    oidc_configuration = requests.get(config.OIDC_WELL_KNOWN_URL).json()
    user_info_url = oidc_configuration["userinfo_endpoint"]
    token_url = oidc_configuration["token_endpoint"]

    oidc = OAuth2Session(
        config.OIDC_CLIENT_ID,
        state=session[SESSION_STATE_KEY],
        scope=[OIDC_SCOPES],
        redirect_uri=redirect_uri,
    )
    oidc.fetch_token(
        token_url,
        client_secret=config.OIDC_CLIENT_SECRET,
        authorization_response=request.url,
    )

    oidc_user_data = oidc.get(user_info_url)
    if oidc_user_data.status_code != 200:
        LOG.e(
            f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}"
        )
        flash(
            "Cannot get user data from OIDC, please use another way to login/sign up",
            "error",
        )
        return redirect(url_for("auth.login"))
    oidc_user_data = oidc_user_data.json()

    email = oidc_user_data.get("email")

    if not email:
        LOG.e(f"cannot get email for OIDC user {oidc_user_data} {email}")
        flash(
            "Cannot get a valid email from OIDC, please use another way to login/sign up",
            "error",
        )
        return redirect(url_for("auth.login"))

    email = sanitize_email(email)
    user = User.get_by(email=email)

    if not user and config.DISABLE_REGISTRATION:
        flash(
            "Sorry you cannot sign up via the OIDC provider. Please sign-up first with your email.",
            "error",
        )
        return redirect(url_for("auth.register"))
    elif not user:
        user = create_user(email, oidc_user_data)

    if not SocialAuth.get_by(user_id=user.id, social="oidc"):
        SocialAuth.create(user_id=user.id, social="oidc")
        Session.commit()

    # The activation link contains the original page, for ex authorize page
    next_url = session[SESSION_NEXT_KEY]
    session[SESSION_NEXT_KEY] = None

    return after_login(user, next_url)


def create_user(email, oidc_user_data):
    new_user = User.create(
        email=email,
        name=oidc_user_data.get(OIDC_NAME_FIELD),
        password="",
        activated=True,
    )
    LOG.i(f"Created new user for login request from OIDC. New user {new_user.id}")
    Session.commit()

    send_welcome_email(new_user)

    return new_user
@@ -23,7 +23,7 @@ from app.proton.proton_callback_handler import (
    ProtonCallbackHandler,
    Action,
)
from app.proton.utils import get_proton_partner
from app.proton.proton_partner import get_proton_partner
from app.utils import sanitize_next_url, sanitize_scheme

_authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"
@@ -6,7 +6,7 @@ from wtforms import StringField, validators

from app import email_utils, config
from app.auth.base import auth_bp
from app.config import CONNECT_WITH_PROTON
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON
from app.auth.views.login_utils import get_referral
from app.config import URL, HCAPTCHA_SECRET, HCAPTCHA_SITEKEY
from app.db import Session
@@ -109,11 +109,14 @@ def register():
        next_url=next_url,
        HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
        connect_with_proton=CONNECT_WITH_PROTON,
        connect_with_oidc=config.OIDC_CLIENT_ID is not None,
        connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
    )


def send_activation_email(user, next_url):
    # the activation code is valid for 1h
    # the activation code is valid for 1h and delete all previous codes
    Session.query(ActivationCode).filter(ActivationCode.user_id == user.id).delete()
    activation = ActivationCode.create(user_id=user.id, code=random_string(30))
    Session.commit()

@@ -123,4 +126,4 @@ def send_activation_email(user, next_url):
        LOG.d("redirect user to %s after activation", next_url)
        activation_link = activation_link + "&next=" + encode_url(next_url)

    email_utils.send_activation_email(user.email, activation_link)
    email_utils.send_activation_email(user, activation_link)
@@ -9,6 +9,7 @@ from app.auth.views.login_utils import after_login
from app.db import Session
from app.extensions import limiter
from app.models import ResetPasswordCode
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class ResetPasswordForm(FlaskForm):
@@ -59,6 +60,11 @@ def reset_password():

    # this can be served to activate user too
    user.activated = True
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.ResetPassword,
        message="User has reset their password",
    )

    # remove all reset password codes
    ResetPasswordCode.filter_by(user_id=user.id).delete()
@@ -1,2 +1,3 @@
SHA1 = "dev"
BUILD_TIME = "1652365083"
VERSION = SHA1
@@ -3,7 +3,7 @@ import random
import socket
import string
from ast import literal_eval
from typing import Callable, List
from typing import Callable, List, Optional
from urllib.parse import urlparse

from dotenv import load_dotenv
@@ -35,6 +35,44 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
    return literal_eval(value)


def get_env_dict(env_var: str) -> dict[str, str]:
    """
    Get an env variable and convert it into a python dictionary with keys and values as strings.
    Args:
        env_var (str): env var, example: SL_DB

    Syntax is: key1=value1;key2=value2
    Components separated by ;
    key and value separated by =
    """
    value = os.getenv(env_var)
    if not value:
        return {}

    components = value.split(";")
    result = {}
    for component in components:
        if component == "":
            continue
        parts = component.split("=")
        if len(parts) != 2:
            raise Exception(f"Invalid config for env var {env_var}")
        result[parts[0].strip()] = parts[1].strip()

    return result


def get_env_csv(env_var: str, default: Optional[str]) -> list[str]:
    """
    Get an env variable and convert it into a list of strings separated by commas.
    Syntax is: val1,val2
    """
    value = os.getenv(env_var, default)
    if not value:
        return []
    return [field.strip() for field in value.split(",") if field.strip()]

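To make the two parsers above concrete, a hedged example of the syntax they accept (the variable names and values are invented; note that importing app.config pulls in the whole settings module, so this is only a sketch):

import os

from app.config import get_env_csv, get_env_dict

os.environ["PARTNER_DNS_CUSTOM_DOMAINS"] = "1=partner1.example; 2=partner2.example"
os.environ["PROTON_EMAIL_DOMAINS"] = "proton.me, protonmail.com"

# get_env_dict splits on ";" then "=", stripping whitespace around keys and values.
assert get_env_dict("PARTNER_DNS_CUSTOM_DOMAINS") == {
    "1": "partner1.example",
    "2": "partner2.example",
}
# get_env_csv splits on "," and drops empty fields.
assert get_env_csv("PROTON_EMAIL_DOMAINS", None) == ["proton.me", "protonmail.com"]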
config_file = os.environ.get("CONFIG")
if config_file:
    config_file = get_abs_path(config_file)
@@ -120,7 +158,7 @@ if POSTFIX_SUBMISSION_TLS:
else:
    default_postfix_port = 25
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", default_postfix_port))
POSTFIX_TIMEOUT = os.environ.get("POSTFIX_TIMEOUT", 3)
POSTFIX_TIMEOUT = int(os.environ.get("POSTFIX_TIMEOUT", 3))

# ["domain1.com", "domain2.com"]
OTHER_ALIAS_DOMAINS = sl_getenv("OTHER_ALIAS_DOMAINS", list)
@@ -144,6 +182,14 @@ FIRST_ALIAS_DOMAIN = os.environ.get("FIRST_ALIAS_DOMAIN") or EMAIL_DOMAIN
# e.g. [(10, "mx1.hostname."), (10, "mx2.hostname.")]
EMAIL_SERVERS_WITH_PRIORITY = sl_getenv("EMAIL_SERVERS_WITH_PRIORITY")

PROTON_MX_SERVERS = get_env_csv(
    "PROTON_MX_SERVERS", "mail.protonmail.ch., mailsec.protonmail.ch."
)

PROTON_EMAIL_DOMAINS = get_env_csv(
    "PROTON_EMAIL_DOMAINS", "proton.me, protonmail.com, protonmail.ch, proton.ch, pm.me"
)

# disable the alias suffix, i.e. the ".random_word" part
DISABLE_ALIAS_SUFFIX = "DISABLE_ALIAS_SUFFIX" in os.environ

@@ -234,7 +280,7 @@ else:

    print("WARNING: Use a temp directory for GNUPGHOME", GNUPGHOME)

# Github, Google, Facebook client id and secrets
# Github, Google, Facebook, OIDC client id and secrets
GITHUB_CLIENT_ID = os.environ.get("GITHUB_CLIENT_ID")
GITHUB_CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")

@@ -244,6 +290,13 @@ GOOGLE_CLIENT_SECRET = os.environ.get("GOOGLE_CLIENT_SECRET")
FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")

CONNECT_WITH_OIDC_ICON = os.environ.get("CONNECT_WITH_OIDC_ICON")
OIDC_WELL_KNOWN_URL = os.environ.get("OIDC_WELL_KNOWN_URL")
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID")
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET")
OIDC_SCOPES = os.environ.get("OIDC_SCOPES")
OIDC_NAME_FIELD = os.environ.get("OIDC_NAME_FIELD", "name")

PROTON_CLIENT_ID = os.environ.get("PROTON_CLIENT_ID")
PROTON_CLIENT_SECRET = os.environ.get("PROTON_CLIENT_SECRET")
PROTON_BASE_URL = os.environ.get(
@@ -263,18 +316,6 @@ MFA_USER_ID = "mfa_user_id"
FLASK_PROFILER_PATH = os.environ.get("FLASK_PROFILER_PATH")
FLASK_PROFILER_PASSWORD = os.environ.get("FLASK_PROFILER_PASSWORD")

# Job names
JOB_ONBOARDING_1 = "onboarding-1"
JOB_ONBOARDING_2 = "onboarding-2"
JOB_ONBOARDING_3 = "onboarding-3"
JOB_ONBOARDING_4 = "onboarding-4"
JOB_BATCH_IMPORT = "batch-import"
JOB_DELETE_ACCOUNT = "delete-account"
JOB_DELETE_MAILBOX = "delete-mailbox"
JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"

# for pagination
PAGE_LIMIT = 20

@@ -421,6 +462,11 @@ try:
except Exception:
    HIBP_SCAN_INTERVAL_DAYS = 7
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
HIBP_MAX_ALIAS_CHECK = 10_000
HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")

KEEP_OLD_DATA_DAYS = 30

POSTMASTER = os.environ.get("POSTMASTER")
@@ -561,9 +607,66 @@ SKIP_MX_LOOKUP_ON_CHECK = False

DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ

SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))

UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)

STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ

EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)

# We want it disabled by default, so only skip if defined
EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ


def read_webhook_enabled_user_ids() -> Optional[List[int]]:
    user_ids = os.environ.get("EVENT_WEBHOOK_ENABLED_USER_IDS", None)
    if user_ids is None:
        return None

    ids = []
    for user_id in user_ids.split(","):
        try:
            ids.append(int(user_id.strip()))
        except ValueError:
            pass
    return ids


EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_ids()

# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
# It defaults to the regular DB_URI in case it's needed
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)


def read_partner_dict(var: str) -> dict[int, str]:
    partner_value = get_env_dict(var)
    if len(partner_value) == 0:
        return {}

    res: dict[int, str] = {}
    for partner_id in partner_value.keys():
        try:
            partner_id_int = int(partner_id.strip())
            res[partner_id_int] = partner_value[partner_id]
        except ValueError:
            pass
    return res


PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
    "PARTNER_DNS_CUSTOM_DOMAINS"
)
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
    "PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
)

MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
    "MAILBOX_VERIFICATION_OVERRIDE_CODE", None
)

AUDIT_LOG_MAX_DAYS = int(os.environ.get("AUDIT_LOG_MAX_DAYS", 30))
18 app/app/constants.py Normal file
@@ -0,0 +1,18 @@
import enum

HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"


class JobType(enum.Enum):
    ONBOARDING_1 = "onboarding-1"
    ONBOARDING_2 = "onboarding-2"
    ONBOARDING_4 = "onboarding-4"
    BATCH_IMPORT = "batch-import"
    DELETE_ACCOUNT = "delete-account"
    DELETE_MAILBOX = "delete-mailbox"
    DELETE_DOMAIN = "delete-domain"
    SEND_USER_REPORT = "send-user-report"
    SEND_PROTON_WELCOME_1 = "proton-welcome-1"
    SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
    SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"
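A minimal usage sketch (not part of the diff itself): the JobType values above replace the JOB_* string constants removed from config.py, and later hunks in this changeset schedule jobs by enum value. The payload id below is a placeholder.

import arrow

from app.constants import JobType
from app.models import Job

# schedule a batch-import job by its JobType value (placeholder payload id)
Job.create(
    name=JobType.BATCH_IMPORT.value,
    payload={"batch_import_id": 123},
    run_at=arrow.now(),
    commit=True,
)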
138 app/app/contact_utils.py Normal file
@@ -0,0 +1,138 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional

from sqlalchemy.exc import IntegrityError

from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.db import Session
from app.email_utils import generate_reply_email, parse_full_address
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Contact, Alias
from app.utils import sanitize_email


class ContactCreateError(Enum):
    InvalidEmail = "Invalid email"
    NotAllowed = "Your plan does not allow to create contacts"
    Unknown = "Unknown error when trying to create contact"


@dataclass
class ContactCreateResult:
    contact: Optional[Contact]
    created: bool
    error: Optional[ContactCreateError]


def __update_contact_if_needed(
    contact: Contact, name: Optional[str], mail_from: Optional[str]
) -> ContactCreateResult:
    if name and contact.name != name:
        LOG.d(f"Setting {contact} name to {name}")
        contact.name = name
        Session.commit()
    if mail_from and contact.mail_from is None:
        LOG.d(f"Setting {contact} mail_from to {mail_from}")
        contact.mail_from = mail_from
        Session.commit()
    return ContactCreateResult(contact, created=False, error=None)


def create_contact(
    email: str,
    alias: Alias,
    name: Optional[str] = None,
    mail_from: Optional[str] = None,
    allow_empty_email: bool = False,
    automatic_created: bool = False,
    from_partner: bool = False,
) -> ContactCreateResult:
    # If user cannot create contacts, they still need to be created when receiving an email for an alias
    if not automatic_created and not alias.user.can_create_contacts():
        return ContactCreateResult(
            None, created=False, error=ContactCreateError.NotAllowed
        )
    # Parse emails with form 'name <email>'
    try:
        email_name, email = parse_full_address(email)
    except ValueError:
        email = ""
        email_name = ""
    # If no name is explicitly given try to get it from the parsed email
    if name is None:
        name = email_name[: Contact.MAX_NAME_LENGTH]
    else:
        name = name[: Contact.MAX_NAME_LENGTH]
    # If still no name is there, make sure the name is None instead of empty string
    if not name:
        name = None
    if name is not None and "\x00" in name:
        LOG.w("Cannot use contact name because has \\x00")
        name = ""
    # Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
    email = sanitize_email(email, not_lower=True)
    if not is_valid_email(email):
        LOG.w(f"invalid contact email {email}")
        if not allow_empty_email:
            return ContactCreateResult(
                None, created=False, error=ContactCreateError.InvalidEmail
            )
        LOG.d("Create a contact with invalid email for %s", alias)
        # either reuse a contact with empty email or create a new contact with empty email
        email = ""
    # If contact exists, update name and mail_from if needed
    contact = Contact.get_by(alias_id=alias.id, website_email=email)
    if contact is not None:
        return __update_contact_if_needed(contact, name, mail_from)
    # Create the contact
    reply_email = generate_reply_email(email, alias)
    alias_id = alias.id
    try:
        flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
        is_invalid_email = email == ""
        contact = Contact.create(
            user_id=alias.user_id,
            alias_id=alias.id,
            website_email=email,
            name=name,
            reply_email=reply_email,
            mail_from=mail_from,
            automatic_created=automatic_created,
            flags=flags,
            invalid_email=is_invalid_email,
            commit=True,
        )
        contact_id = contact.id
        if automatic_created:
            trail = ". Automatically created"
        else:
            trail = ". Created by user action"
        emit_alias_audit_log(
            alias=alias,
            action=AliasAuditLogAction.CreateContact,
            message=f"Created contact {contact_id} ({email}){trail}",
            commit=True,
        )
        LOG.d(
            f"Created contact {contact} for alias {alias} with email {email} invalid_email={is_invalid_email}"
        )
        return ContactCreateResult(contact, created=True, error=None)
    except IntegrityError:
        Session.rollback()
        LOG.info(
            f"Contact with email {email} for alias_id {alias_id} already existed, fetching from DB"
        )
        contact: Optional[Contact] = Contact.get_by(
            alias_id=alias_id, website_email=email
        )
        if contact:
            return __update_contact_if_needed(contact, name, mail_from)
        else:
            LOG.warning(
                f"Could not find contact with email {email} for alias_id {alias_id} and it should exist"
            )
            return ContactCreateResult(
                None, created=False, error=ContactCreateError.Unknown
            )
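A minimal sketch of how the dashboard code later in this changeset consumes ContactCreateResult; the alias variable is assumed to be an existing Alias row and the address is a placeholder.

from app import contact_utils
from app.contact_utils import ContactCreateError

# create (or fetch) a contact for an alias and inspect the result
output = contact_utils.create_contact(email="John Doe <john@example.com>", alias=alias)
if output.error == ContactCreateError.InvalidEmail:
    ...  # reject the address
elif output.error == ContactCreateError.NotAllowed:
    ...  # the user's plan does not allow creating contacts
elif not output.created:
    ...  # contact already existed; output.contact is the existing row
else:
    contact = output.contact  # newly created contact with its reverse alias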
149 app/app/coupon_utils.py Normal file
@@ -0,0 +1,149 @@
|
||||
from typing import Optional
|
||||
|
||||
import arrow
|
||||
from sqlalchemy import or_, update, and_
|
||||
|
||||
from app.config import ADMIN_EMAIL
|
||||
from app.db import Session
|
||||
from app.email_utils import send_email
|
||||
from app.events.event_dispatcher import EventDispatcher
|
||||
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
User,
|
||||
ManualSubscription,
|
||||
Coupon,
|
||||
LifetimeCoupon,
|
||||
PartnerSubscription,
|
||||
PartnerUser,
|
||||
)
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
|
||||
class CouponUserCannotRedeemError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def redeem_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
|
||||
if user.lifetime:
|
||||
LOG.i(f"User {user} is a lifetime SL user. Cannot redeem coupons")
|
||||
raise CouponUserCannotRedeemError()
|
||||
|
||||
sub = user.get_active_subscription()
|
||||
if sub and not isinstance(sub, ManualSubscription):
|
||||
LOG.i(
|
||||
f"User {user} has an active subscription that is not manual. Cannot redeem coupon {coupon_code}"
|
||||
)
|
||||
raise CouponUserCannotRedeemError()
|
||||
|
||||
coupon = Coupon.get_by(code=coupon_code)
|
||||
if not coupon:
|
||||
LOG.i(f"User is trying to redeem coupon {coupon_code} that does not exist")
|
||||
return None
|
||||
|
||||
now = arrow.utcnow()
|
||||
stmt = (
|
||||
update(Coupon)
|
||||
.where(
|
||||
and_(
|
||||
Coupon.code == coupon_code,
|
||||
Coupon.used == False, # noqa: E712
|
||||
or_(
|
||||
Coupon.expires_date == None, # noqa: E711
|
||||
Coupon.expires_date > now,
|
||||
),
|
||||
)
|
||||
)
|
||||
.values(used=True, used_by_user_id=user.id, updated_at=now)
|
||||
)
|
||||
res = Session.execute(stmt)
|
||||
if res.rowcount == 0:
|
||||
LOG.i(f"Coupon {coupon.id} could not be redeemed. It's expired or invalid.")
|
||||
return None
|
||||
|
||||
LOG.i(
|
||||
f"Redeemed normal coupon {coupon.id} for {coupon.nb_year} years by user {user}"
|
||||
)
|
||||
if sub:
|
||||
# renew existing subscription
|
||||
if sub.end_at > arrow.now():
|
||||
sub.end_at = sub.end_at.shift(years=coupon.nb_year)
|
||||
else:
|
||||
sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
|
||||
else:
|
||||
# There may be an expired manual subscription
|
||||
sub = ManualSubscription.get_by(user_id=user.id)
|
||||
end_at = arrow.now().shift(years=coupon.nb_year, days=1)
|
||||
if sub:
|
||||
sub.end_at = end_at
|
||||
else:
|
||||
sub = ManualSubscription.create(
|
||||
user_id=user.id,
|
||||
end_at=end_at,
|
||||
comment="using coupon code",
|
||||
is_giveaway=coupon.is_giveaway,
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.Upgrade,
|
||||
message=f"User {user} redeemed coupon {coupon.id} for {coupon.nb_year} years",
|
||||
)
|
||||
EventDispatcher.send_event(
|
||||
user=user,
|
||||
content=EventContent(
|
||||
user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
|
||||
),
|
||||
)
|
||||
Session.commit()
|
||||
return coupon
|
||||
|
||||
|
||||
def redeem_lifetime_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
|
||||
if user.lifetime:
|
||||
return None
|
||||
partner_sub = (
|
||||
Session.query(PartnerSubscription)
|
||||
.join(PartnerUser, PartnerUser.id == PartnerSubscription.partner_user_id)
|
||||
.filter(PartnerUser.user_id == user.id, PartnerSubscription.lifetime == True) # noqa: E712
|
||||
.first()
|
||||
)
|
||||
if partner_sub is not None:
|
||||
return None
|
||||
coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=coupon_code)
|
||||
if not coupon:
|
||||
return None
|
||||
|
||||
stmt = (
|
||||
update(LifetimeCoupon)
|
||||
.where(
|
||||
and_(
|
||||
LifetimeCoupon.code == coupon_code,
|
||||
LifetimeCoupon.nb_used > 0,
|
||||
)
|
||||
)
|
||||
.values(nb_used=LifetimeCoupon.nb_used - 1)
|
||||
)
|
||||
res = Session.execute(stmt)
|
||||
if res.rowcount == 0:
|
||||
LOG.i("Coupon could not be redeemed")
|
||||
return None
|
||||
|
||||
user.lifetime = True
|
||||
user.lifetime_coupon_id = coupon.id
|
||||
if coupon.paid:
|
||||
user.paid_lifetime = True
|
||||
EventDispatcher.send_event(
|
||||
user=user,
|
||||
content=EventContent(user_plan_change=UserPlanChanged(lifetime=True)),
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
# notify admin
|
||||
send_email(
|
||||
ADMIN_EMAIL,
|
||||
subject=f"User {user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
|
||||
plaintext="",
|
||||
html="",
|
||||
)
|
||||
|
||||
return coupon
|
206 app/app/custom_domain_utils.py Normal file
@@ -0,0 +1,206 @@
|
||||
import arrow
|
||||
import re
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
from app.constants import JobType
|
||||
from app.db import Session
|
||||
from app.email_utils import get_email_domain_part
|
||||
from app.log import LOG
|
||||
from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
|
||||
_MAX_MAILBOXES_PER_DOMAIN = 20
|
||||
|
||||
|
||||
@dataclass
|
||||
class CreateCustomDomainResult:
|
||||
message: str = ""
|
||||
message_category: str = ""
|
||||
success: bool = False
|
||||
instance: Optional[CustomDomain] = None
|
||||
redirect: Optional[str] = None
|
||||
|
||||
|
||||
class CannotUseDomainReason(Enum):
|
||||
InvalidDomain = 1
|
||||
BuiltinDomain = 2
|
||||
DomainAlreadyUsed = 3
|
||||
DomainPartOfUserEmail = 4
|
||||
DomainUserInMailbox = 5
|
||||
|
||||
def message(self, domain: str) -> str:
|
||||
if self == CannotUseDomainReason.InvalidDomain:
|
||||
return "This is not a valid domain"
|
||||
elif self == CannotUseDomainReason.BuiltinDomain:
|
||||
return "A custom domain cannot be a built-in domain."
|
||||
elif self == CannotUseDomainReason.DomainAlreadyUsed:
|
||||
return f"{domain} already used"
|
||||
elif self == CannotUseDomainReason.DomainPartOfUserEmail:
|
||||
return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
|
||||
elif self == CannotUseDomainReason.DomainUserInMailbox:
|
||||
return f"{domain} already used in a SimpleLogin mailbox"
|
||||
else:
|
||||
raise Exception("Invalid CannotUseDomainReason")
|
||||
|
||||
|
||||
class CannotSetCustomDomainMailboxesCause(Enum):
|
||||
InvalidMailbox = "Something went wrong, please retry"
|
||||
NoMailboxes = "You must select at least 1 mailbox"
|
||||
TooManyMailboxes = (
|
||||
f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SetCustomDomainMailboxesResult:
|
||||
success: bool
|
||||
reason: Optional[CannotSetCustomDomainMailboxesCause] = None
|
||||
|
||||
|
||||
def is_valid_domain(domain: str) -> bool:
|
||||
"""
|
||||
Checks that a domain is valid according to RFC 1035
|
||||
"""
|
||||
if len(domain) > 255:
|
||||
return False
|
||||
if domain.endswith("."):
|
||||
domain = domain[:-1] # Strip the trailing dot
|
||||
labels = domain.split(".")
|
||||
if not labels:
|
||||
return False
|
||||
for label in labels:
|
||||
if not _ALLOWED_DOMAIN_REGEX.match(label):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def sanitize_domain(domain: str) -> str:
|
||||
new_domain = domain.lower().strip()
|
||||
if new_domain.startswith("http://"):
|
||||
new_domain = new_domain[len("http://") :]
|
||||
|
||||
if new_domain.startswith("https://"):
|
||||
new_domain = new_domain[len("https://") :]
|
||||
|
||||
return new_domain
|
||||
|
||||
|
||||
def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
|
||||
if not is_valid_domain(domain):
|
||||
return CannotUseDomainReason.InvalidDomain
|
||||
elif SLDomain.get_by(domain=domain):
|
||||
return CannotUseDomainReason.BuiltinDomain
|
||||
elif CustomDomain.get_by(domain=domain):
|
||||
return CannotUseDomainReason.DomainAlreadyUsed
|
||||
elif get_email_domain_part(user.email) == domain:
|
||||
return CannotUseDomainReason.DomainPartOfUserEmail
|
||||
elif Mailbox.filter(
|
||||
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
|
||||
).first():
|
||||
return CannotUseDomainReason.DomainUserInMailbox
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def create_custom_domain(
|
||||
user: User, domain: str, partner_id: Optional[int] = None
|
||||
) -> CreateCustomDomainResult:
|
||||
if not user.is_premium():
|
||||
return CreateCustomDomainResult(
|
||||
message="Only premium plan can add custom domain",
|
||||
message_category="warning",
|
||||
)
|
||||
|
||||
new_domain = sanitize_domain(domain)
|
||||
domain_forbidden_cause = can_domain_be_used(user, new_domain)
|
||||
if domain_forbidden_cause:
|
||||
return CreateCustomDomainResult(
|
||||
message=domain_forbidden_cause.message(new_domain), message_category="error"
|
||||
)
|
||||
|
||||
new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)
|
||||
|
||||
# new domain has ownership verified if its parent has the ownership verified
|
||||
for root_cd in user.custom_domains:
|
||||
if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
|
||||
LOG.i(
|
||||
"%s ownership verified thanks to %s",
|
||||
new_custom_domain,
|
||||
root_cd,
|
||||
)
|
||||
new_custom_domain.ownership_verified = True
|
||||
|
||||
# Add the partner_id in case it's passed
|
||||
if partner_id is not None:
|
||||
new_custom_domain.partner_id = partner_id
|
||||
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.CreateCustomDomain,
|
||||
message=f"Created custom domain {new_custom_domain.id} ({new_domain})",
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
return CreateCustomDomainResult(
|
||||
success=True,
|
||||
instance=new_custom_domain,
|
||||
)
|
||||
|
||||
|
||||
def delete_custom_domain(domain: CustomDomain):
|
||||
# Schedule delete domain job
|
||||
LOG.w("schedule delete domain job for %s", domain)
|
||||
domain.pending_deletion = True
|
||||
Job.create(
|
||||
name=JobType.DELETE_DOMAIN.value,
|
||||
payload={"custom_domain_id": domain.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
|
||||
|
||||
def set_custom_domain_mailboxes(
|
||||
user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
|
||||
) -> SetCustomDomainMailboxesResult:
|
||||
if len(mailbox_ids) == 0:
|
||||
return SetCustomDomainMailboxesResult(
|
||||
success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
|
||||
)
|
||||
elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
|
||||
return SetCustomDomainMailboxesResult(
|
||||
success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
|
||||
)
|
||||
|
||||
mailboxes = (
|
||||
Session.query(Mailbox)
|
||||
.filter(
|
||||
Mailbox.id.in_(mailbox_ids),
|
||||
Mailbox.user_id == user_id,
|
||||
Mailbox.verified == True, # noqa: E712
|
||||
)
|
||||
.all()
|
||||
)
|
||||
if len(mailboxes) != len(mailbox_ids):
|
||||
return SetCustomDomainMailboxesResult(
|
||||
success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
|
||||
)
|
||||
|
||||
# first remove all existing domain-mailboxes links
|
||||
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
|
||||
Session.flush()
|
||||
|
||||
for mailbox in mailboxes:
|
||||
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
|
||||
|
||||
mailboxes_as_str = ",".join(map(str, mailbox_ids))
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.UpdateCustomDomain,
|
||||
message=f"Updated custom domain {custom_domain.id} mailboxes (domain={custom_domain.domain}) (mailboxes={mailboxes_as_str})",
|
||||
)
|
||||
Session.commit()
|
||||
return SetCustomDomainMailboxesResult(success=True)
|
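A minimal sketch of how the custom_domain dashboard view further down consumes CreateCustomDomainResult; user is assumed to be a premium User and the domain string is a placeholder.

from app.custom_domain_utils import create_custom_domain

res = create_custom_domain(user=user, domain="mail.example.com")
if res.success:
    # res.instance is the newly created CustomDomain
    print(f"created {res.instance.domain} (id={res.instance.id})")
else:
    # res.message / res.message_category explain why the domain was rejected
    print(res.message_category, res.message)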
@@ -1,37 +1,293 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
|
||||
from app import config
|
||||
from app.constants import DMARC_RECORD
|
||||
from app.db import Session
|
||||
from app.dns_utils import get_cname_record
|
||||
from app.dns_utils import (
|
||||
DNSClient,
|
||||
get_network_dns_client,
|
||||
)
|
||||
from app.models import CustomDomain
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import random_string
|
||||
|
||||
|
||||
@dataclass
|
||||
class DomainValidationResult:
|
||||
success: bool
|
||||
errors: [str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ExpectedValidationRecords:
|
||||
recommended: str
|
||||
allowed: list[str]
|
||||
|
||||
|
||||
def is_mx_equivalent(
|
||||
mx_domains: dict[int, list[str]],
|
||||
expected_mx_domains: dict[int, ExpectedValidationRecords],
|
||||
) -> bool:
|
||||
"""
|
||||
Compare mx_domains with ref_mx_domains to see if they are equivalent.
|
||||
mx_domains and ref_mx_domains are list of (priority, domain)
|
||||
|
||||
The priority order is taken into account but not the priority number.
|
||||
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
|
||||
"""
|
||||
|
||||
expected_prios = []
|
||||
for prio in expected_mx_domains:
|
||||
expected_prios.append(prio)
|
||||
|
||||
if len(expected_prios) != len(mx_domains):
|
||||
return False
|
||||
|
||||
for prio_position, prio_value in enumerate(sorted(mx_domains.keys())):
|
||||
for domain in mx_domains[prio_value]:
|
||||
if domain not in expected_mx_domains[expected_prios[prio_position]].allowed:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class CustomDomainValidation:
|
||||
def __init__(self, dkim_domain: str):
|
||||
def __init__(
|
||||
self,
|
||||
dkim_domain: str,
|
||||
dns_client: DNSClient = get_network_dns_client(),
|
||||
partner_domains: Optional[dict[int, str]] = None,
|
||||
partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
|
||||
):
|
||||
self.dkim_domain = dkim_domain
|
||||
self._dkim_records = {
|
||||
(f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
|
||||
for key in ("dkim", "dkim02", "dkim03")
|
||||
self._dns_client = dns_client
|
||||
self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
|
||||
self._partner_domain_validation_prefixes = (
|
||||
partner_domains_validation_prefixes
|
||||
or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
|
||||
)
|
||||
|
||||
def get_ownership_verification_record(
|
||||
self, domain: CustomDomain
|
||||
) -> ExpectedValidationRecords:
|
||||
prefixes = ["sl"]
|
||||
if (
|
||||
domain.partner_id is not None
|
||||
and domain.partner_id in self._partner_domain_validation_prefixes
|
||||
):
|
||||
prefixes.insert(
|
||||
0, self._partner_domain_validation_prefixes[domain.partner_id]
|
||||
)
|
||||
|
||||
if not domain.ownership_txt_token:
|
||||
domain.ownership_txt_token = random_string(30)
|
||||
Session.commit()
|
||||
|
||||
valid = [
|
||||
f"{prefix}-verification={domain.ownership_txt_token}" for prefix in prefixes
|
||||
]
|
||||
return ExpectedValidationRecords(recommended=valid[0], allowed=valid)
|
||||
|
||||
def get_expected_mx_records(
|
||||
self, domain: CustomDomain
|
||||
) -> dict[int, ExpectedValidationRecords]:
|
||||
records = {}
|
||||
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
|
||||
domain = self._partner_domains[domain.partner_id]
|
||||
records[10] = [f"mx1.{domain}."]
|
||||
records[20] = [f"mx2.{domain}."]
|
||||
# Default ones
|
||||
for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
|
||||
if priority not in records:
|
||||
records[priority] = []
|
||||
records[priority].append(domain)
|
||||
|
||||
return {
|
||||
priority: ExpectedValidationRecords(
|
||||
recommended=records[priority][0], allowed=records[priority]
|
||||
)
|
||||
for priority in records
|
||||
}
|
||||
|
||||
def get_dkim_records(self) -> {str: str}:
|
||||
"""
|
||||
Get a list of dkim records to set up. It will be
|
||||
def get_expected_spf_domain(
|
||||
self, domain: CustomDomain
|
||||
) -> ExpectedValidationRecords:
|
||||
records = []
|
||||
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
|
||||
records.append(self._partner_domains[domain.partner_id])
|
||||
else:
|
||||
records.append(config.EMAIL_DOMAIN)
|
||||
return ExpectedValidationRecords(recommended=records[0], allowed=records)
|
||||
|
||||
def get_expected_spf_record(self, domain: CustomDomain) -> str:
|
||||
spf_domain = self.get_expected_spf_domain(domain)
|
||||
return f"v=spf1 include:{spf_domain.recommended} ~all"
|
||||
|
||||
def get_dkim_records(
|
||||
self, domain: CustomDomain
|
||||
) -> {str: ExpectedValidationRecords}:
|
||||
"""
|
||||
return self._dkim_records
|
||||
Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
|
||||
it will return the default ones or the partner ones.
|
||||
"""
|
||||
|
||||
# By default use the default domain
|
||||
dkim_domains = [self.dkim_domain]
|
||||
if domain.partner_id is not None:
|
||||
# Domain is from a partner. Retrieve the partner config and use that domain as preferred if it exists
|
||||
partner_domain = self._partner_domains.get(domain.partner_id, None)
|
||||
if partner_domain is not None:
|
||||
dkim_domains.insert(0, partner_domain)
|
||||
|
||||
output = {}
|
||||
for key in ("dkim", "dkim02", "dkim03"):
|
||||
records = [
|
||||
f"{key}._domainkey.{dkim_domain}" for dkim_domain in dkim_domains
|
||||
]
|
||||
output[f"{key}._domainkey"] = ExpectedValidationRecords(
|
||||
recommended=records[0], allowed=records
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
|
||||
"""
|
||||
Check if dkim records are properly set for this custom domain.
|
||||
Returns empty list if all records are ok. Other-wise return the records that aren't properly configured
|
||||
"""
|
||||
correct_records = {}
|
||||
invalid_records = {}
|
||||
for prefix, expected_record in self.get_dkim_records():
|
||||
expected_records = self.get_dkim_records(custom_domain)
|
||||
for prefix, expected_record in expected_records.items():
|
||||
custom_record = f"{prefix}.{custom_domain.domain}"
|
||||
dkim_record = get_cname_record(custom_record)
|
||||
if dkim_record != expected_record:
|
||||
dkim_record = self._dns_client.get_cname_record(custom_record)
|
||||
if dkim_record in expected_record.allowed:
|
||||
correct_records[prefix] = custom_record
|
||||
else:
|
||||
invalid_records[custom_record] = dkim_record or "empty"
|
||||
# HACK: If dkim is enabled, don't disable it to give users time to update their CNAMES
|
||||
|
||||
# HACK
|
||||
# As initially we only had one dkim record, we want to allow users that had only the original dkim record and
|
||||
# the domain validated to continue seeing it as validated (although showing them the missing records).
|
||||
# However, if not even the original dkim record is right, even if the domain was dkim_verified in the past,
|
||||
# we will remove the dkim_verified flag.
|
||||
# This is done in order to give users with the old dkim config (only one) to update their CNAMEs
|
||||
if custom_domain.dkim_verified:
|
||||
return invalid_records
|
||||
# Check if at least the original dkim is there
|
||||
if correct_records.get("dkim._domainkey") is not None:
|
||||
# Original dkim record is there. Return the missing records (if any) and don't clear the flag
|
||||
return invalid_records
|
||||
|
||||
# Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
|
||||
# rest of the code path, returning the invalid records and clearing the flag
|
||||
custom_domain.dkim_verified = len(invalid_records) == 0
|
||||
if custom_domain.dkim_verified:
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified DKIM records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return invalid_records
|
||||
|
||||
def validate_domain_ownership(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
"""
|
||||
Check if the custom_domain has added the ownership verification records
|
||||
"""
|
||||
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
|
||||
expected_verification_records = self.get_ownership_verification_record(
|
||||
custom_domain
|
||||
)
|
||||
found = False
|
||||
for verification_record in expected_verification_records.allowed:
|
||||
if verification_record in txt_records:
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
custom_domain.ownership_verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified ownership for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
else:
|
||||
return DomainValidationResult(success=False, errors=txt_records)
|
||||
|
||||
def validate_mx_records(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
|
||||
expected_mx_records = self.get_expected_mx_records(custom_domain)
|
||||
|
||||
if not is_mx_equivalent(mx_domains, expected_mx_records):
|
||||
errors = []
|
||||
for prio in mx_domains:
|
||||
for mx_domain in mx_domains[prio]:
|
||||
errors.append(f"{prio} {mx_domain}")
|
||||
return DomainValidationResult(success=False, errors=errors)
|
||||
else:
|
||||
custom_domain.verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified MX records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
|
||||
def validate_spf_records(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
|
||||
expected_spf_domain = self.get_expected_spf_domain(custom_domain)
|
||||
if len(set(expected_spf_domain.allowed).intersection(set(spf_domains))) > 0:
|
||||
custom_domain.spf_verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified SPF records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
else:
|
||||
custom_domain.spf_verified = False
|
||||
Session.commit()
|
||||
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
|
||||
cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
|
||||
return DomainValidationResult(
|
||||
success=False,
|
||||
errors=cleaned_records,
|
||||
)
|
||||
|
||||
def validate_dmarc_records(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
|
||||
if DMARC_RECORD in txt_records:
|
||||
custom_domain.dmarc_verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified DMARC records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
else:
|
||||
custom_domain.dmarc_verified = False
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=False, errors=txt_records)
|
||||
|
||||
def __clean_spf_records(
|
||||
self, txt_records: List[str], custom_domain: CustomDomain
|
||||
) -> List[str]:
|
||||
final_records = []
|
||||
verification_records = self.get_ownership_verification_record(custom_domain)
|
||||
for record in txt_records:
|
||||
if record not in verification_records.allowed:
|
||||
final_records.append(record)
|
||||
return final_records
|
||||
|
@@ -32,6 +32,7 @@ from .views import (
    delete_account,
    notification,
    support,
    account_setting,
)

__all__ = [
@@ -68,4 +69,5 @@ __all__ = [
    "delete_account",
    "notification",
    "support",
    "account_setting",
]
246 app/app/dashboard/views/account_setting.py Normal file
@@ -0,0 +1,246 @@
|
||||
import secrets
|
||||
|
||||
import arrow
|
||||
from flask import (
|
||||
render_template,
|
||||
request,
|
||||
redirect,
|
||||
url_for,
|
||||
flash,
|
||||
)
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import email_utils
|
||||
from app.config import (
|
||||
URL,
|
||||
FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
CONNECT_WITH_PROTON,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.dashboard.views.mailbox_detail import ChangeEmailForm
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
personal_email_already_used,
|
||||
)
|
||||
from app.extensions import limiter
|
||||
from app.jobs.export_user_data_job import ExportUserDataJob
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
BlockBehaviourEnum,
|
||||
PlanEnum,
|
||||
ResetPasswordCode,
|
||||
EmailChange,
|
||||
User,
|
||||
Alias,
|
||||
AliasGeneratorEnum,
|
||||
SenderFormatEnum,
|
||||
UnsubscribeBehaviourEnum,
|
||||
)
|
||||
from app.proton.proton_unlink import perform_proton_account_unlink
|
||||
from app.utils import (
|
||||
random_string,
|
||||
CSRFValidationForm,
|
||||
canonicalize_email,
|
||||
)
|
||||
|
||||
|
||||
@dashboard_bp.route("/account_setting", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
@limiter.limit("5/minute", methods=["POST"])
|
||||
def account_setting():
|
||||
change_email_form = ChangeEmailForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
pending_email = email_change.new_email
|
||||
else:
|
||||
pending_email = None
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
if request.form.get("form-name") == "update-email":
|
||||
if change_email_form.validate():
|
||||
# whether user can proceed with the email update
|
||||
new_email_valid = True
|
||||
new_email = canonicalize_email(change_email_form.email.data)
|
||||
if new_email != current_user.email and not pending_email:
|
||||
# check if this email is not already used
|
||||
if personal_email_already_used(new_email) or Alias.get_by(
|
||||
email=new_email
|
||||
):
|
||||
flash(f"Email {new_email} already used", "error")
|
||||
new_email_valid = False
|
||||
elif not email_can_be_used_as_mailbox(new_email):
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
# a pending email change with the same email exists from another user
|
||||
elif EmailChange.get_by(new_email=new_email):
|
||||
other_email_change: EmailChange = EmailChange.get_by(
|
||||
new_email=new_email
|
||||
)
|
||||
LOG.w(
|
||||
"Another user has a pending %s with the same email address. Current user:%s",
|
||||
other_email_change,
|
||||
current_user,
|
||||
)
|
||||
|
||||
if other_email_change.is_expired():
|
||||
LOG.d(
|
||||
"delete the expired email change %s", other_email_change
|
||||
)
|
||||
EmailChange.delete(other_email_change.id)
|
||||
Session.commit()
|
||||
else:
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
|
||||
if new_email_valid:
|
||||
email_change = EmailChange.create(
|
||||
user_id=current_user.id,
|
||||
code=random_string(
|
||||
60
|
||||
), # todo: make sure the code is unique
|
||||
new_email=new_email,
|
||||
)
|
||||
Session.commit()
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash(
|
||||
"A confirmation email is on the way, please check your inbox",
|
||||
"success",
|
||||
)
|
||||
return redirect(url_for("dashboard.account_setting"))
|
||||
elif request.form.get("form-name") == "change-password":
|
||||
flash(
|
||||
"You are going to receive an email containing instructions to change your password",
|
||||
"success",
|
||||
)
|
||||
send_reset_password_email(current_user)
|
||||
return redirect(url_for("dashboard.account_setting"))
|
||||
elif request.form.get("form-name") == "send-full-user-report":
|
||||
if ExportUserDataJob(current_user).store_job_in_db():
|
||||
flash(
|
||||
"You will receive your SimpleLogin data via email shortly",
|
||||
"success",
|
||||
)
|
||||
else:
|
||||
flash("An export of your data is currently in progress", "error")
|
||||
|
||||
partner_sub = None
|
||||
partner_name = None
|
||||
|
||||
return render_template(
|
||||
"dashboard/account_setting.html",
|
||||
csrf_form=csrf_form,
|
||||
PlanEnum=PlanEnum,
|
||||
SenderFormatEnum=SenderFormatEnum,
|
||||
BlockBehaviourEnum=BlockBehaviourEnum,
|
||||
change_email_form=change_email_form,
|
||||
pending_email=pending_email,
|
||||
AliasGeneratorEnum=AliasGeneratorEnum,
|
||||
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
|
||||
partner_sub=partner_sub,
|
||||
partner_name=partner_name,
|
||||
FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
connect_with_proton=CONNECT_WITH_PROTON,
|
||||
)
|
||||
|
||||
|
||||
def send_reset_password_email(user):
|
||||
"""
|
||||
generate a new ResetPasswordCode and send it over email to user
|
||||
"""
|
||||
# the activation code is valid for 1h
|
||||
reset_password_code = ResetPasswordCode.create(
|
||||
user_id=user.id, code=secrets.token_urlsafe(32)
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
|
||||
|
||||
email_utils.send_reset_password_email(user, reset_password_link)
|
||||
|
||||
|
||||
def send_change_email_confirmation(user: User, email_change: EmailChange):
|
||||
"""
|
||||
send confirmation email to the new email address
|
||||
"""
|
||||
|
||||
link = f"{URL}/auth/change_email?code={email_change.code}"
|
||||
|
||||
email_utils.send_change_email(user, email_change.new_email, link)
|
||||
|
||||
|
||||
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
|
||||
@limiter.limit("5/hour")
|
||||
@login_required
|
||||
@sudo_required
|
||||
def resend_email_change():
|
||||
form = CSRFValidationForm()
|
||||
if not form.validate():
|
||||
flash("Invalid request. Please try again", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
# extend email change expiration
|
||||
email_change.expired = arrow.now().shift(hours=12)
|
||||
Session.commit()
|
||||
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash("A confirmation email is on the way, please check your inbox", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
def cancel_email_change():
|
||||
form = CSRFValidationForm()
|
||||
if not form.validate():
|
||||
flash("Invalid request. Please try again", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
EmailChange.delete(email_change.id)
|
||||
Session.commit()
|
||||
flash("Your email change is cancelled", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
def unlink_proton_account():
|
||||
csrf_form = CSRFValidationForm()
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
if not perform_proton_account_unlink(current_user):
|
||||
flash("Account cannot be unlinked", "warning")
|
||||
else:
|
||||
flash("Your Proton account has been unlinked", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
@ -1,5 +1,6 @@
|
||||
from dataclasses import dataclass
|
||||
from operator import or_
|
||||
from typing import Optional
|
||||
|
||||
from flask import render_template, request, redirect, flash
|
||||
from flask import url_for
|
||||
@ -9,13 +10,11 @@ from sqlalchemy import and_, func, case
|
||||
from wtforms import StringField, validators, ValidationError
|
||||
|
||||
# Need to import directly from config to allow modification from the tests
|
||||
from app import config, parallel_limiter
|
||||
from app import config, parallel_limiter, contact_utils
|
||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
||||
from app.contact_utils import ContactCreateError
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
generate_reply_email,
|
||||
parse_full_address,
|
||||
)
|
||||
from app.email_validation import is_valid_email
|
||||
from app.errors import (
|
||||
CannotCreateContactForReverseAlias,
|
||||
@ -24,8 +23,8 @@ from app.errors import (
|
||||
ErrContactAlreadyExists,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import Alias, Contact, EmailLog, User
|
||||
from app.utils import sanitize_email, CSRFValidationForm
|
||||
from app.models import Alias, Contact, EmailLog
|
||||
from app.utils import CSRFValidationForm
|
||||
|
||||
|
||||
def email_validator():
|
||||
@ -51,7 +50,7 @@ def email_validator():
|
||||
return _check
|
||||
|
||||
|
||||
def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
|
||||
def create_contact(alias: Alias, contact_address: str) -> Contact:
|
||||
"""
|
||||
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
|
||||
Can throw exceptions:
|
||||
@ -61,37 +60,23 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
|
||||
"""
|
||||
if not contact_address:
|
||||
raise ErrAddressInvalid("Empty address")
|
||||
try:
|
||||
contact_name, contact_email = parse_full_address(contact_address)
|
||||
except ValueError:
|
||||
output = contact_utils.create_contact(email=contact_address, alias=alias)
|
||||
if output.error == ContactCreateError.InvalidEmail:
|
||||
raise ErrAddressInvalid(contact_address)
|
||||
|
||||
contact_email = sanitize_email(contact_email)
|
||||
if not is_valid_email(contact_email):
|
||||
raise ErrAddressInvalid(contact_email)
|
||||
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
if contact:
|
||||
raise ErrContactAlreadyExists(contact)
|
||||
|
||||
if not user.can_create_contacts():
|
||||
elif output.error == ContactCreateError.NotAllowed:
|
||||
raise ErrContactErrorUpgradeNeeded()
|
||||
elif output.error is not None:
|
||||
raise ErrAddressInvalid("Invalid address")
|
||||
elif not output.created:
|
||||
raise ErrContactAlreadyExists(output.contact)
|
||||
|
||||
contact = Contact.create(
|
||||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=contact_email,
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_email, alias),
|
||||
)
|
||||
|
||||
contact = output.contact
|
||||
LOG.d(
|
||||
"create reverse-alias for %s %s, reverse alias:%s",
|
||||
contact_address,
|
||||
alias,
|
||||
contact.reply_email,
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
return contact
|
||||
|
||||
@ -207,7 +192,7 @@ def get_contact_infos(
|
||||
|
||||
|
||||
def delete_contact(alias: Alias, contact_id: int):
|
||||
contact = Contact.get(contact_id)
|
||||
contact: Optional[Contact] = Contact.get(contact_id)
|
||||
|
||||
if not contact:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
@ -215,6 +200,11 @@ def delete_contact(alias: Alias, contact_id: int):
|
||||
flash("You cannot delete reverse-alias", "warning")
|
||||
else:
|
||||
delete_contact_email = contact.website_email
|
||||
emit_alias_audit_log(
|
||||
alias=alias,
|
||||
action=AliasAuditLogAction.DeleteContact,
|
||||
message=f"Delete contact {contact_id} ({contact.email})",
|
||||
)
|
||||
Contact.delete(contact_id)
|
||||
Session.commit()
|
||||
|
||||
@ -237,7 +227,10 @@ def alias_contact_manager(alias_id):
|
||||
|
||||
page = 0
|
||||
if request.args.get("page"):
|
||||
page = int(request.args.get("page"))
|
||||
try:
|
||||
page = int(request.args.get("page"))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
query = request.args.get("query") or ""
|
||||
|
||||
@ -261,7 +254,7 @@ def alias_contact_manager(alias_id):
|
||||
if new_contact_form.validate():
|
||||
contact_address = new_contact_form.email.data.strip()
|
||||
try:
|
||||
contact = create_contact(current_user, alias, contact_address)
|
||||
contact = create_contact(alias, contact_address)
|
||||
except (
|
||||
ErrContactErrorUpgradeNeeded,
|
||||
ErrAddressInvalid,
|
||||
|
@@ -1,9 +1,13 @@
from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
from app.dashboard.views.enter_sudo import sudo_required
from app.extensions import limiter


@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
@sudo_required
@limiter.limit("2/minute")
def alias_export_route():
    return alias_export_csv(current_user)
@ -7,6 +7,7 @@ from flask import render_template, redirect, url_for, flash, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import config
|
||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
||||
from app.alias_utils import transfer_alias
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
@ -57,6 +58,12 @@ def alias_transfer_send_route(alias_id):
|
||||
transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
|
||||
alias.transfer_token = hmac_alias_transfer_token(transfer_token)
|
||||
alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)
|
||||
|
||||
emit_alias_audit_log(
|
||||
alias,
|
||||
AliasAuditLogAction.InitiateTransferAlias,
|
||||
"Initiated alias transfer",
|
||||
)
|
||||
Session.commit()
|
||||
alias_transfer_url = (
|
||||
config.URL
|
||||
|
@ -3,9 +3,11 @@ from flask import render_template, flash, request, redirect, url_for
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import s3
|
||||
from app.config import JOB_BATCH_IMPORT
|
||||
from app.constants import JobType
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.db import Session
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import File, BatchImport, Job
|
||||
from app.utils import random_string, CSRFValidationForm
|
||||
@ -13,6 +15,8 @@ from app.utils import random_string, CSRFValidationForm
|
||||
|
||||
@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
@limiter.limit("10/minute", methods=["POST"])
|
||||
def batch_import_route():
|
||||
# only for users who have custom domains
|
||||
if not current_user.verified_custom_domains():
|
||||
@ -37,7 +41,7 @@ def batch_import_route():
|
||||
return redirect(request.url)
|
||||
if len(batch_imports) > 10:
|
||||
flash(
|
||||
"You have too many imports already. Wait until some get cleaned up",
|
||||
"You have too many imports already. Please wait until some get cleaned up",
|
||||
"error",
|
||||
)
|
||||
return render_template(
|
||||
@ -60,7 +64,7 @@ def batch_import_route():
|
||||
|
||||
# Schedule batch import job
|
||||
Job.create(
|
||||
name=JOB_BATCH_IMPORT,
|
||||
name=JobType.BATCH_IMPORT.value,
|
||||
payload={"batch_import_id": bi.id},
|
||||
run_at=arrow.now(),
|
||||
)
|
||||
|
@ -1,8 +1,11 @@
|
||||
from typing import Optional
|
||||
|
||||
from flask import render_template, request, redirect, url_for, flash
|
||||
from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import StringField, validators
|
||||
|
||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.models import Contact
|
||||
@ -20,7 +23,7 @@ class PGPContactForm(FlaskForm):
|
||||
@dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def contact_detail_route(contact_id):
|
||||
contact = Contact.get(contact_id)
|
||||
contact: Optional[Contact] = Contact.get(contact_id)
|
||||
if not contact or contact.user_id != current_user.id:
|
||||
flash("You cannot see this page", "warning")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
@ -50,6 +53,11 @@ def contact_detail_route(contact_id):
|
||||
except PGPException:
|
||||
flash("Cannot add the public key, please verify it", "error")
|
||||
else:
|
||||
emit_alias_audit_log(
|
||||
alias=alias,
|
||||
action=AliasAuditLogAction.UpdateContact,
|
||||
message=f"Added PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
|
||||
)
|
||||
Session.commit()
|
||||
flash(
|
||||
f"PGP public key for {contact.email} is saved successfully",
|
||||
@ -62,6 +70,11 @@ def contact_detail_route(contact_id):
|
||||
)
|
||||
elif pgp_form.action.data == "remove":
|
||||
# Free user can decide to remove contact PGP key
|
||||
emit_alias_audit_log(
|
||||
alias=alias,
|
||||
action=AliasAuditLogAction.UpdateContact,
|
||||
message=f"Removed PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
|
||||
)
|
||||
contact.pgp_public_key = None
|
||||
contact.pgp_finger_print = None
|
||||
Session.commit()
|
||||
|
@ -1,17 +1,15 @@
|
||||
import arrow
|
||||
from flask import render_template, flash, redirect, url_for, request
|
||||
from flask import render_template, flash, redirect, url_for
|
||||
from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import StringField, validators
|
||||
|
||||
from app import parallel_limiter
|
||||
from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
|
||||
from app.coupon_utils import redeem_coupon, CouponUserCannotRedeemError
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
ManualSubscription,
|
||||
Coupon,
|
||||
Subscription,
|
||||
AppleSubscription,
|
||||
CoinbaseSubscription,
|
||||
@ -58,56 +56,23 @@ def coupon_route():
|
||||
|
||||
if coupon_form.validate_on_submit():
|
||||
code = coupon_form.code.data
|
||||
|
||||
coupon: Coupon = Coupon.get_by(code=code)
|
||||
if coupon and not coupon.used:
|
||||
if coupon.expires_date and coupon.expires_date < arrow.now():
|
||||
flash(
|
||||
f"The coupon was expired on {coupon.expires_date.humanize()}",
|
||||
"error",
|
||||
)
|
||||
return redirect(request.url)
|
||||
|
||||
updated = (
|
||||
Session.query(Coupon)
|
||||
.filter_by(code=code, used=False)
|
||||
.update({"used_by_user_id": current_user.id, "used": True})
|
||||
)
|
||||
if updated != 1:
|
||||
flash("Coupon is not valid", "error")
|
||||
return redirect(request.url)
|
||||
|
||||
manual_sub: ManualSubscription = ManualSubscription.get_by(
|
||||
user_id=current_user.id
|
||||
)
|
||||
if manual_sub:
|
||||
# renew existing subscription
|
||||
if manual_sub.end_at > arrow.now():
|
||||
manual_sub.end_at = manual_sub.end_at.shift(years=coupon.nb_year)
|
||||
else:
|
||||
manual_sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
|
||||
Session.commit()
|
||||
flash(
|
||||
f"Your current subscription is extended to {manual_sub.end_at.humanize()}",
|
||||
"success",
|
||||
)
|
||||
else:
|
||||
ManualSubscription.create(
|
||||
user_id=current_user.id,
|
||||
end_at=arrow.now().shift(years=coupon.nb_year, days=1),
|
||||
comment="using coupon code",
|
||||
is_giveaway=coupon.is_giveaway,
|
||||
commit=True,
|
||||
)
|
||||
try:
|
||||
coupon = redeem_coupon(code, current_user)
|
||||
if coupon:
|
||||
flash(
|
||||
"Your account has been upgraded to Premium, thanks for your support!",
|
||||
"success",
|
||||
)
|
||||
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
else:
|
||||
flash(f"Code *{code}* expired or invalid", "warning")
|
||||
else:
|
||||
flash(
|
||||
"This coupon cannot be redeemed. It's invalid or has expired",
|
||||
"warning",
|
||||
)
|
||||
except CouponUserCannotRedeemError:
|
||||
flash(
|
||||
"You have an active subscription. Please remove it before redeeming a coupon",
|
||||
"warning",
|
||||
)
|
||||
|
||||
return render_template(
|
||||
"dashboard/coupon.html",
|
||||
|
@@ -5,11 +5,9 @@ from wtforms import StringField, validators

from app import parallel_limiter
from app.config import EMAIL_SERVERS_WITH_PRIORITY
from app.custom_domain_utils import create_custom_domain
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
from app.models import CustomDomain


class NewCustomDomainForm(FlaskForm):
@@ -23,13 +21,12 @@ class NewCustomDomainForm(FlaskForm):
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
def custom_domain():
custom_domains = CustomDomain.filter_by(
user_id=current_user.id, is_sl_subdomain=False
user_id=current_user.id,
is_sl_subdomain=False,
pending_deletion=False,
).all()
mailboxes = current_user.mailboxes()
new_custom_domain_form = NewCustomDomainForm()

errors = {}

if request.method == "POST":
if request.form.get("form-name") == "create":
if not current_user.is_premium():
@@ -37,87 +34,25 @@ def custom_domain():
return redirect(url_for("dashboard.custom_domain"))

if new_custom_domain_form.validate():
new_domain = new_custom_domain_form.domain.data.lower().strip()

if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]

if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]

if SLDomain.get_by(domain=new_domain):
flash("A custom domain cannot be a built-in domain.", "error")
elif CustomDomain.get_by(domain=new_domain):
flash(f"{new_domain} already used", "error")
elif get_email_domain_part(current_user.email) == new_domain:
flash(
"You cannot add a domain that you are currently using for your personal email. "
"Please change your personal email to your real email",
"error",
)
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
).first():
flash(
f"{new_domain} already used in a SimpleLogin mailbox", "error"
)
else:
new_custom_domain = CustomDomain.create(
domain=new_domain, user_id=current_user.id
)
# new domain has ownership verified if its parent has the ownership verified
for root_cd in current_user.custom_domains:
if (
new_domain.endswith("." + root_cd.domain)
and root_cd.ownership_verified
):
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True

Session.commit()

mailbox_ids = request.form.getlist("mailbox_ids")
if mailbox_ids:
# check if mailbox is not tempered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(url_for("dashboard.custom_domain"))
mailboxes.append(mailbox)

for mailbox in mailboxes:
DomainMailbox.create(
domain_id=new_custom_domain.id, mailbox_id=mailbox.id
)

Session.commit()

flash(
f"New domain {new_custom_domain.domain} is created", "success"
)

res = create_custom_domain(
user=current_user, domain=new_custom_domain_form.domain.data
)
if res.success:
flash(f"New domain {res.instance.domain} is created", "success")
return redirect(
url_for(
"dashboard.domain_detail_dns",
custom_domain_id=new_custom_domain.id,
custom_domain_id=res.instance.id,
)
)
else:
flash(res.message, res.message_category)
if res.redirect:
return redirect(url_for(res.redirect))

return render_template(
"dashboard/custom_domain.html",
custom_domains=custom_domains,
new_custom_domain_form=new_custom_domain_form,
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
errors=errors,
mailboxes=mailboxes,
)
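The refactor above replaces the inline domain checks with app.custom_domain_utils.create_custom_domain, which returns a result object the view only reads from. A hedged, self-contained sketch of that return shape and of the prefix normalization the old code did; the field names are inferred from the call sites above (res.success, res.instance, res.message, res.message_category, res.redirect), not taken from the actual module:

from dataclasses import dataclass
from typing import Optional

@dataclass
class CreateCustomDomainResult:
    success: bool = False
    instance: Optional[object] = None   # the created CustomDomain on success (assumption)
    message: str = ""
    message_category: str = "error"     # flash() category
    redirect: Optional[str] = None      # endpoint name to redirect to, if any

def normalize_domain(raw: str) -> str:
    # same normalization the removed inline code performed
    domain = raw.lower().strip()
    for prefix in ("http://", "https://"):
        if domain.startswith(prefix):
            domain = domain[len(prefix):]
    return domain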
@@ -3,11 +3,12 @@ from flask import flash, redirect, url_for, request, render_template
from flask_login import login_required, current_user
from flask_wtf import FlaskForm

from app.config import JOB_DELETE_ACCOUNT
from app.constants import JobType
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.log import LOG
from app.models import Subscription, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class DeleteDirForm(FlaskForm):
@@ -33,8 +34,13 @@ def delete_account():

# Schedule delete account job
LOG.w("schedule delete account job for %s", current_user)
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UserMarkedForDeletion,
message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
)
Job.create(
name=JOB_DELETE_ACCOUNT,
name=JobType.DELETE_ACCOUNT.value,
payload={"user_id": current_user.id},
run_at=arrow.now(),
commit=True,
@@ -1,3 +1,5 @@
from typing import Optional

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
@@ -20,6 +22,7 @@ from app.dashboard.base import dashboard_bp
from app.db import Session
from app.errors import DirectoryInTrashError
from app.models import Directory, Mailbox, DirectoryMailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class NewDirForm(FlaskForm):
@@ -69,7 +72,9 @@ def directory():
if not delete_dir_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_obj = Directory.get(delete_dir_form.directory_id.data)
dir_obj: Optional[Directory] = Directory.get(
delete_dir_form.directory_id.data
)

if not dir_obj:
flash("Unknown error. Refresh the page", "warning")
@@ -79,6 +84,11 @@ def directory():
return redirect(url_for("dashboard.directory"))

name = dir_obj.name
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.DeleteDirectory,
message=f"Delete directory {dir_obj.id} ({dir_obj.name})",
)
Directory.delete(dir_obj.id)
Session.commit()
flash(f"Directory {name} has been deleted", "success")
@@ -90,7 +100,7 @@ def directory():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = toggle_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)
dir_obj: Optional[Directory] = Directory.get(dir_id)

if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
@@ -103,6 +113,11 @@ def directory():
dir_obj.disabled = True
flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")

emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateDirectory,
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) set disabled = {dir_obj.disabled}",
)
Session.commit()

return redirect(url_for("dashboard.directory"))
@@ -112,7 +127,7 @@ def directory():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = update_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)
dir_obj: Optional[Directory] = Directory.get(dir_id)

if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
@@ -143,6 +158,12 @@ def directory():
for mailbox in mailboxes:
DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)

mailboxes_as_str = ",".join(map(str, mailbox_ids))
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateDirectory,
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) mailboxes ({mailboxes_as_str})",
)
Session.commit()
flash(f"Directory {dir_obj.name} has been updated", "success")

@@ -181,6 +202,11 @@ def directory():
new_dir = Directory.create(
name=new_dir_name, user_id=current_user.id
)
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.CreateDirectory,
message=f"New directory {new_dir.name} ({new_dir.name})",
)
except DirectoryInTrashError:
flash(
f"{new_dir_name} has been used before and cannot be reused",
@@ -1,33 +1,26 @@
import re

import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators, IntegerField

from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
from app.constants import DMARC_RECORD
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
from app.custom_domain_validation import CustomDomainValidation
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.dns_utils import (
get_mx_domains,
get_spf_domain,
get_txt_record,
is_mx_equivalent,
)
from app.log import LOG
from app.models import (
CustomDomain,
Alias,
DomainDeletedAlias,
Mailbox,
DomainMailbox,
AutoCreateRule,
AutoCreateRuleMailbox,
Job,
)
from app.regex_utils import regex_match
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string, CSRFValidationForm

@@ -44,13 +37,9 @@ def domain_detail_dns(custom_domain_id):
custom_domain.ownership_txt_token = random_string(30)
Session.commit()

spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"

domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
csrf_form = CSRFValidationForm()

dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"

mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []

@@ -59,15 +48,14 @@ def domain_detail_dns(custom_domain_id):
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "check-ownership":
txt_records = get_txt_record(custom_domain.domain)

if custom_domain.get_ownership_dns_txt_value() in txt_records:
ownership_validation_result = domain_validator.validate_domain_ownership(
custom_domain
)
if ownership_validation_result.success:
flash(
"Domain ownership is verified. Please proceed to the other records setup",
"success",
)
custom_domain.ownership_verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns",
@@ -78,36 +66,28 @@ def domain_detail_dns(custom_domain_id):
else:
flash("We can't find the needed TXT record", "error")
ownership_ok = False
ownership_errors = txt_records
ownership_errors = ownership_validation_result.errors

elif request.form.get("form-name") == "check-mx":
mx_domains = get_mx_domains(custom_domain.domain)

if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
flash("The MX record is not correctly set", "warning")

mx_ok = False
# build mx_errors to show to user
mx_errors = [
f"{priority} {domain}" for (priority, domain) in mx_domains
]
else:
mx_validation_result = domain_validator.validate_mx_records(custom_domain)
if mx_validation_result.success:
flash(
"Your domain can start receiving emails. You can now use it to create alias",
"success",
)
custom_domain.verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
else:
flash("The MX record is not correctly set", "warning")
mx_ok = False
mx_errors = mx_validation_result.errors

elif request.form.get("form-name") == "check-spf":
spf_domains = get_spf_domain(custom_domain.domain)
if EMAIL_DOMAIN in spf_domains:
custom_domain.spf_verified = True
Session.commit()
spf_validation_result = domain_validator.validate_spf_records(custom_domain)
if spf_validation_result.success:
flash("SPF is setup correctly", "success")
return redirect(
url_for(
@@ -115,14 +95,12 @@ def domain_detail_dns(custom_domain_id):
)
)
else:
custom_domain.spf_verified = False
Session.commit()
flash(
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
"warning",
)
spf_ok = False
spf_errors = get_txt_record(custom_domain.domain)
spf_errors = spf_validation_result.errors

elif request.form.get("form-name") == "check-dkim":
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
@@ -138,10 +116,10 @@ def domain_detail_dns(custom_domain_id):
flash("DKIM: the CNAME record is not correctly set", "warning")

elif request.form.get("form-name") == "check-dmarc":
txt_records = get_txt_record("_dmarc." + custom_domain.domain)
if dmarc_record in txt_records:
custom_domain.dmarc_verified = True
Session.commit()
dmarc_validation_result = domain_validator.validate_dmarc_records(
custom_domain
)
if dmarc_validation_result.success:
flash("DMARC is setup correctly", "success")
return redirect(
url_for(
@@ -149,19 +127,23 @@ def domain_detail_dns(custom_domain_id):
)
)
else:
custom_domain.dmarc_verified = False
Session.commit()
flash(
"DMARC: The TXT record is not correctly set",
"warning",
)
dmarc_ok = False
dmarc_errors = txt_records
dmarc_errors = dmarc_validation_result.errors

return render_template(
"dashboard/domain_detail/dns.html",
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
dkim_records=domain_validator.get_dkim_records(),
ownership_records=domain_validator.get_ownership_verification_record(
custom_domain
),
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
dkim_records=domain_validator.get_dkim_records(custom_domain),
spf_record=domain_validator.get_expected_spf_record(custom_domain),
dmarc_record=DMARC_RECORD,
**locals(),
)
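Each check-* branch above now delegates to CustomDomainValidation and only reads back a success flag plus an errors list. A minimal sketch of that result shape and of one validator in the same spirit, written against an injected get_txt_record callable like the one imported above; the class internals of the real validator are an assumption:

from dataclasses import dataclass, field
from typing import Callable, List

@dataclass
class DomainValidationResult:
    success: bool
    errors: List[str] = field(default_factory=list)

def validate_dmarc(domain: str, expected_record: str,
                   get_txt_record: Callable[[str], List[str]]) -> DomainValidationResult:
    # look up TXT records on the _dmarc subdomain and compare against the expected value
    records = get_txt_record("_dmarc." + domain)
    if expected_record in records:
        return DomainValidationResult(success=True)
    # surface whatever was found so the template can show it as the error detail
    return DomainValidationResult(success=False, errors=records)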
@@ -183,6 +165,11 @@ def domain_detail(custom_domain_id):
return redirect(request.url)
if request.form.get("form-name") == "switch-catch-all":
custom_domain.catch_all = not custom_domain.catch_all
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) catch all to {custom_domain.catch_all}",
)
Session.commit()

if custom_domain.catch_all:
@@ -201,6 +188,11 @@ def domain_detail(custom_domain_id):
elif request.form.get("form-name") == "set-name":
if request.form.get("action") == "save":
custom_domain.name = request.form.get("alias-name").replace("\n", "")
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) name",
)
Session.commit()
flash(
f"Default alias name for Domain {custom_domain.domain} has been set",
@@ -208,6 +200,11 @@ def domain_detail(custom_domain_id):
)
else:
custom_domain.name = None
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Cleared custom domain {custom_domain.id} ({custom_domain.domain}) name",
)
Session.commit()
flash(
f"Default alias name for Domain {custom_domain.domain} has been removed",
@@ -221,6 +218,11 @@ def domain_detail(custom_domain_id):
custom_domain.random_prefix_generation = (
not custom_domain.random_prefix_generation
)
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) random prefix generation to {custom_domain.random_prefix_generation}",
)
Session.commit()

if custom_domain.random_prefix_generation:
@@ -238,40 +240,16 @@ def domain_detail(custom_domain_id):
)
elif request.form.get("form-name") == "update":
mailbox_ids = request.form.getlist("mailbox_ids")
# check if mailbox is not tempered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
mailboxes.append(mailbox)
result = set_custom_domain_mailboxes(
user_id=current_user.id,
custom_domain=custom_domain,
mailbox_ids=mailbox_ids,
)

if not mailboxes:
flash("You must select at least 1 mailbox", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)

# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()

for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)

Session.commit()
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
if result.success:
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
else:
flash(result.reason.value, "warning")

return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
@@ -279,16 +257,8 @@ def domain_detail(custom_domain_id):

elif request.form.get("form-name") == "delete":
name = custom_domain.domain
LOG.d("Schedule deleting %s", custom_domain)

# Schedule delete domain job
LOG.w("schedule delete domain job for %s", custom_domain)
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": custom_domain.id},
run_at=arrow.now(),
commit=True,
)
delete_custom_domain(custom_domain)

flash(
f"{name} scheduled for deletion."
@@ -6,15 +6,15 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import PasswordField, validators

from app.config import CONNECT_WITH_PROTON
from app.config import CONNECT_WITH_PROTON, OIDC_CLIENT_ID, CONNECT_WITH_OIDC_ICON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
from app.models import PartnerUser, SocialAuth
from app.proton.proton_partner import get_proton_partner
from app.utils import sanitize_next_url

_SUDO_GAP = 900
_SUDO_GAP = 120


class LoginForm(FlaskForm):
@@ -51,11 +51,19 @@ def enter_sudo():
if not partner_user or partner_user.partner_id != get_proton_partner().id:
proton_enabled = False

oidc_enabled = OIDC_CLIENT_ID is not None
if oidc_enabled:
oidc_enabled = (
SocialAuth.get_by(user_id=current_user.id, social="oidc") is not None
)

return render_template(
"dashboard/enter_sudo.html",
password_check_form=password_check_form,
next=request.args.get("next"),
connect_with_proton=proton_enabled,
connect_with_oidc=oidc_enabled,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)
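_SUDO_GAP drops from 900 to 120 seconds here, i.e. the re-authentication window for sudo-protected pages shrinks to two minutes. A hedged sketch of how such a gap is typically enforced with a session timestamp; the session key and decorator body are assumptions, not SimpleLogin's actual sudo_required:

import time
from functools import wraps
from flask import session, redirect, url_for

_SUDO_GAP = 120  # seconds a sudo confirmation stays valid (mirrors the new value above)

def sudo_required_sketch(view):
    @wraps(view)
    def wrapper(*args, **kwargs):
        last_sudo = session.get("sudo_time", 0)  # hypothetical session key
        if time.time() - last_sudo > _SUDO_GAP:
            # confirmation missing or too old: ask for the password again
            return redirect(url_for("dashboard.enter_sudo"))
        return view(*args, **kwargs)
    return wrapper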
@@ -12,6 +12,7 @@ from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
AliasDeleteReason,
AliasGeneratorEnum,
User,
EmailLog,
@@ -70,7 +71,10 @@ def index():

page = 0
if request.args.get("page"):
page = int(request.args.get("page"))
try:
page = int(request.args.get("page"))
except ValueError:
pass

highlight_alias_id = None
if request.args.get("highlight_alias_id"):
@@ -141,12 +145,16 @@ def index():
)

if request.form.get("form-name") == "delete-alias":
LOG.d("delete alias %s", alias)
LOG.i(f"User {current_user} requested deletion of alias {alias}")
email = alias.email
alias_utils.delete_alias(alias, current_user)
alias_utils.delete_alias(
alias, current_user, AliasDeleteReason.ManualAction, commit=True
)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
alias.enabled = False
alias_utils.change_alias_status(
alias, enabled=False, message="Set enabled=False from dashboard"
)
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")
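The page-number handling above (and the same change in the notifications view further down) swaps a bare int() call for a try/except so a malformed ?page= value no longer raises. A small reusable helper in the same spirit; the helper itself is illustrative, not part of the codebase:

def parse_page_arg(raw_value, default: int = 0) -> int:
    """Parse a ?page= query parameter, falling back to a default on bad input."""
    if raw_value is None:
        return default
    try:
        return int(raw_value)
    except ValueError:
        return default

Used as page = parse_page_arg(request.args.get("page")), it keeps the view body to a single line per parameter.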
@@ -3,11 +3,9 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app.config import ADMIN_EMAIL
from app import parallel_limiter
from app.coupon_utils import redeem_lifetime_coupon
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import send_email
from app.models import LifetimeCoupon


class CouponForm(FlaskForm):
@@ -16,6 +14,7 @@ class CouponForm(FlaskForm):

@dashboard_bp.route("/lifetime_licence", methods=["GET", "POST"])
@login_required
@parallel_limiter.lock()
def lifetime_licence():
if current_user.lifetime:
flash("You already have a lifetime licence", "warning")
@@ -32,28 +31,12 @@ def lifetime_licence():

if coupon_form.validate_on_submit():
code = coupon_form.code.data

coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=code)
if coupon and coupon.nb_used > 0:
coupon.nb_used -= 1
current_user.lifetime = True
current_user.lifetime_coupon_id = coupon.id
if coupon.paid:
current_user.paid_lifetime = True
Session.commit()

# notify admin
send_email(
ADMIN_EMAIL,
subject=f"User {current_user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
plaintext="",
html="",
)

coupon = redeem_lifetime_coupon(code, current_user)
if coupon:
flash("You are upgraded to lifetime premium!", "success")
return redirect(url_for("dashboard.index"))

else:
flash(f"Code *{code}* expired or invalid", "warning")
flash("Coupon code expired or invalid", "warning")

return render_template("dashboard/lifetime_licence.html", coupon_form=coupon_form)
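redeem_lifetime_coupon now encapsulates what the removed block did inline: look up the code, decrement nb_used, flip the user's lifetime flags, and notify the admin. A hedged sketch of the core bookkeeping only; coupon and user are duck-typed stand-ins for LifetimeCoupon and User, and the real helper in app.coupon_utils presumably also commits and sends the admin email:

def redeem_lifetime_coupon_sketch(coupon, user):
    """Mirror of the removed inline control flow; stand-in objects, not the real models."""
    if not coupon or coupon.nb_used <= 0:
        return None  # unknown or exhausted code
    coupon.nb_used -= 1
    user.lifetime = True
    user.lifetime_coupon_id = coupon.id
    if coupon.paid:
        user.paid_lifetime = True
    return coupon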
@@ -1,8 +1,8 @@
import base64
import binascii
import json
from typing import Optional

import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
@@ -10,19 +10,13 @@ from itsdangerous import TimestampSigner
from wtforms import validators, IntegerField
from wtforms.fields.html5 import EmailField

from app import parallel_limiter
from app.config import MAILBOX_SECRET, URL, JOB_DELETE_MAILBOX
from app import parallel_limiter, mailbox_utils, user_settings
from app.config import MAILBOX_SECRET
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
mailbox_already_used,
render,
send_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.models import Mailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import CSRFValidationForm

@@ -58,120 +52,61 @@ def mailbox_route():
if not delete_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
mailbox = Mailbox.get(delete_mailbox_form.mailbox_id.data)

if not mailbox or mailbox.user_id != current_user.id:
flash("Invalid mailbox. Refresh the page", "warning")
try:
mailbox = mailbox_utils.delete_mailbox(
current_user,
delete_mailbox_form.mailbox_id.data,
delete_mailbox_form.transfer_mailbox_id.data,
)
except mailbox_utils.MailboxError as e:
flash(e.msg, "warning")
return redirect(url_for("dashboard.mailbox_route"))

if mailbox.id == current_user.default_mailbox_id:
flash("You cannot delete default mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))

transfer_mailbox_id = delete_mailbox_form.transfer_mailbox_id.data
if transfer_mailbox_id and transfer_mailbox_id > 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id)

if not transfer_mailbox or transfer_mailbox.user_id != current_user.id:
flash(
"You must transfer the aliases to a mailbox you own.", "error"
)
return redirect(url_for("dashboard.mailbox_route"))

if transfer_mailbox.id == mailbox.id:
flash(
"You can not transfer the aliases to the mailbox you want to delete.",
"error",
)
return redirect(url_for("dashboard.mailbox_route"))

if not transfer_mailbox.verified:
flash("Your new mailbox is not verified", "error")
return redirect(url_for("dashboard.mailbox_route"))

# Schedule delete account job
LOG.w(
f"schedule delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id
if transfer_mailbox_id > 0
else None,
},
run_at=arrow.now(),
commit=True,
)

flash(
f"Mailbox {mailbox.email} scheduled for deletion."
f"You will receive a confirmation email when the deletion is finished",
"success",
)

return redirect(url_for("dashboard.mailbox_route"))

if request.form.get("form-name") == "set-default":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
mailbox_id = request.form.get("mailbox_id")
mailbox = Mailbox.get(mailbox_id)

if not mailbox or mailbox.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
try:
mailbox_id = request.form.get("mailbox_id")
mailbox = user_settings.set_default_mailbox(current_user, mailbox_id)
except user_settings.CannotSetMailbox as e:
flash(e.msg, "warning")
return redirect(url_for("dashboard.mailbox_route"))

if mailbox.id == current_user.default_mailbox_id:
flash("This mailbox is already default one", "error")
return redirect(url_for("dashboard.mailbox_route"))

if not mailbox.verified:
flash("Cannot set unverified mailbox as default", "error")
return redirect(url_for("dashboard.mailbox_route"))

current_user.default_mailbox_id = mailbox.id
Session.commit()
flash(f"Mailbox {mailbox.email} is set as Default Mailbox", "success")

return redirect(url_for("dashboard.mailbox_route"))

elif request.form.get("form-name") == "create":
if not current_user.is_premium():
flash("Only premium plan can add additional mailbox", "warning")
if not new_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
mailbox_email = new_mailbox_form.email.data.lower().strip().replace(" ", "")
try:
mailbox = mailbox_utils.create_mailbox(
current_user, mailbox_email
).mailbox
except mailbox_utils.MailboxError as e:
flash(e.msg, "warning")
return redirect(url_for("dashboard.mailbox_route"))

if new_mailbox_form.validate():
mailbox_email = (
new_mailbox_form.email.data.lower().strip().replace(" ", "")
flash(
f"You are going to receive an email to confirm {mailbox.email}.",
"success",
)

return redirect(
url_for(
"dashboard.mailbox_detail_route",
mailbox_id=mailbox.id,
)

if not is_valid_email(mailbox_email):
flash(f"{mailbox_email} invalid", "error")
elif mailbox_already_used(mailbox_email, current_user):
flash(f"{mailbox_email} already used", "error")
elif not email_can_be_used_as_mailbox(mailbox_email):
flash(f"You cannot use {mailbox_email}.", "error")
else:
new_mailbox = Mailbox.create(
email=mailbox_email, user_id=current_user.id
)
Session.commit()

send_verification_email(current_user, new_mailbox)

flash(
f"You are going to receive an email to confirm {mailbox_email}.",
"success",
)

return redirect(
url_for(
"dashboard.mailbox_detail_route",
mailbox_id=new_mailbox.id,
)
)
)

return render_template(
"dashboard/mailbox.html",
@@ -182,34 +117,31 @@ def mailbox_route():
)


def send_verification_email(user, mailbox):
s = TimestampSigner(MAILBOX_SECRET)
encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
b64_data = base64.urlsafe_b64encode(encoded_data)
mailbox_id_signed = s.sign(b64_data).decode()
verification_url = (
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
)
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
)


@dashboard_bp.route("/mailbox_verify")
@login_required
def mailbox_verify():
mailbox_id = request.args.get("mailbox_id")
if not mailbox_id:
LOG.i("Missing mailbox_id")
flash("You followed an invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))

code = request.args.get("code")
if not code:
# Old way
return verify_with_signed_secret(mailbox_id)

try:
mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
except mailbox_utils.MailboxError as e:
LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
flash(f"Cannot verify mailbox: {e.msg}", "error")
return redirect(url_for("dashboard.mailbox_route"))
LOG.d("Mailbox %s is verified", mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)


def verify_with_signed_secret(request: str):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_verify_request = request.args.get("mailbox_id")
try:
@@ -227,7 +159,7 @@ def mailbox_verify():
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_id = mailbox_data[0]
mailbox = Mailbox.get(mailbox_id)
mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
@@ -237,6 +169,11 @@ def mailbox_verify():
return redirect(url_for("dashboard.mailbox_route"))

mailbox.verified = True
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.VerifyMailbox,
message=f"Verified mailbox {mailbox.id} ({mailbox.email})",
)
Session.commit()

LOG.d("Mailbox %s is verified", mailbox)
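The legacy verify_with_signed_secret path kept above relies on itsdangerous' TimestampSigner: the mailbox id and email are JSON-encoded, base64-urlsafe-encoded, signed, and later unsigned with a max_age. A self-contained sketch of that round trip; the 900-second window mirrors the code above and the secret is a placeholder for MAILBOX_SECRET:

import base64
import json
from itsdangerous import TimestampSigner, BadSignature, SignatureExpired

SECRET = "replace-me"  # placeholder, not the real MAILBOX_SECRET

def sign_mailbox(mailbox_id: int, email: str) -> str:
    # encode the payload, then wrap it in a timestamped signature
    s = TimestampSigner(SECRET)
    payload = base64.urlsafe_b64encode(json.dumps([mailbox_id, email]).encode("utf-8"))
    return s.sign(payload).decode()

def unsign_mailbox(token: str, max_age: int = 900):
    # returns [mailbox_id, email] or None if the link expired or was tampered with
    s = TimestampSigner(SECRET)
    try:
        payload = s.unsign(token, max_age=max_age)
    except (SignatureExpired, BadSignature):
        return None
    return json.loads(base64.urlsafe_b64decode(payload))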
@@ -1,34 +1,39 @@
from smtplib import SMTPRecipientsRefused

from email_validator import validate_email, EmailNotValidError
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import TimestampSigner
from wtforms import validators
from wtforms.fields.html5 import EmailField
from wtforms.fields.simple import StringField

from app import mailbox_utils
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.extensions import limiter
from app.mailbox_utils import (
perform_mailbox_email_change,
MailboxEmailChangeError,
MailboxError,
)
from app.models import AuthorizedAddress
from app.models import Mailbox
from app.pgp_utils import PGPException, load_public_key_and_check
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email, CSRFValidationForm


class ChangeEmailForm(FlaskForm):
email = EmailField(
email = StringField(
"email", validators=[validators.DataRequired(), validators.Email()]
)


@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("20/minute", methods=["POST"])
def mailbox_detail_route(mailbox_id):
mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
@@ -51,41 +56,30 @@ def mailbox_detail_route(mailbox_id):
request.form.get("form-name") == "update-email"
and change_email_form.validate_on_submit()
):
new_email = sanitize_email(change_email_form.email.data)
if new_email != mailbox.email and not pending_email:
# check if this email is not already used
if mailbox_already_used(new_email, current_user) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
elif not email_can_be_used_as_mailbox(new_email):
flash("You cannot use this email address as your mailbox", "error")
else:
mailbox.new_email = new_email
Session.commit()

try:
verify_mailbox_change(current_user, mailbox, new_email)
except SMTPRecipientsRefused:
flash(
f"Incorrect mailbox, please recheck {mailbox.email}",
"error",
)
else:
flash(
f"You are going to receive an email to confirm {new_email}.",
"success",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
try:
response = mailbox_utils.request_mailbox_email_change(
current_user, mailbox, change_email_form.email.data
)
flash(
f"You are going to receive an email to confirm {mailbox.email}.",
"success",
)
except mailbox_utils.MailboxError as e:
flash(e.msg, "error")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "force-spf":
if not ENFORCE_SPF:
flash("SPF enforcement globally not enabled", "error")
return redirect(url_for("dashboard.index"))

mailbox.force_spf = (
True if request.form.get("spf-status") == "on" else False
force_spf_value = request.form.get("spf-status") == "on"
mailbox.force_spf = force_spf_value
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Set force_spf to {force_spf_value} on mailbox {mailbox_id} ({mailbox.email})",
)
Session.commit()
flash(
@@ -109,6 +103,11 @@ def mailbox_detail_route(mailbox_id):
if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
flash(f"{address} already added", "error")
else:
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Add authorized address {address} to mailbox {mailbox_id} ({mailbox.email})",
)
AuthorizedAddress.create(
user_id=current_user.id,
mailbox_id=mailbox.id,
@@ -129,6 +128,11 @@ def mailbox_detail_route(mailbox_id):
flash("Unknown error. Refresh the page", "warning")
else:
address = authorized_address.email
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Remove authorized address {address} from mailbox {mailbox_id} ({mailbox.email})",
)
AuthorizedAddress.delete(authorized_address_id)
Session.commit()
flash(f"{address} has been deleted", "success")
@@ -161,6 +165,11 @@ def mailbox_detail_route(mailbox_id):
except PGPException:
flash("Cannot add the public key, please verify it", "error")
else:
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Add PGP Key {mailbox.pgp_finger_print} to mailbox {mailbox_id} ({mailbox.email})",
)
Session.commit()
flash("Your PGP public key is saved successfully", "success")
return redirect(
@@ -168,6 +177,11 @@ def mailbox_detail_route(mailbox_id):
)
elif request.form.get("action") == "remove":
# Free user can decide to remove their added PGP key
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Remove PGP Key {mailbox.pgp_finger_print} from mailbox {mailbox_id} ({mailbox.email})",
)
mailbox.pgp_public_key = None
mailbox.pgp_finger_print = None
mailbox.disable_pgp = False
@@ -179,10 +193,27 @@ def mailbox_detail_route(mailbox_id):

elif request.form.get("form-name") == "toggle-pgp":
if request.form.get("pgp-enabled") == "on":
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "success")
if mailbox.is_proton():
mailbox.disable_pgp = True
flash(
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
"info",
)
else:
mailbox.disable_pgp = False
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Enabled PGP for mailbox {mailbox_id} ({mailbox.email})",
)
flash(f"PGP is enabled on {mailbox.email}", "info")
else:
mailbox.disable_pgp = True
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Disabled PGP for mailbox {mailbox_id} ({mailbox.email})",
)
flash(f"PGP is disabled on {mailbox.email}", "info")

Session.commit()
@@ -192,6 +223,11 @@ def mailbox_detail_route(mailbox_id):
elif request.form.get("form-name") == "generic-subject":
if request.form.get("action") == "save":
mailbox.generic_subject = request.form.get("generic-subject")
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Set generic subject for mailbox {mailbox_id} ({mailbox.email})",
)
Session.commit()
flash("Generic subject is enabled", "success")
return redirect(
@@ -199,6 +235,11 @@ def mailbox_detail_route(mailbox_id):
)
elif request.form.get("action") == "remove":
mailbox.generic_subject = None
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Remove generic subject for mailbox {mailbox_id} ({mailbox.email})",
)
Session.commit()
flash("Generic subject is disabled", "success")
return redirect(
@@ -209,91 +250,57 @@ def mailbox_detail_route(mailbox_id):
return render_template("dashboard/mailbox_detail.html", **locals())


def verify_mailbox_change(user, mailbox, new_email):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
verification_url = (
f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
)

send_email(
new_email,
"Confirm mailbox change on SimpleLogin",
render(
"transactional/verify-mailbox-change.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
mailbox_new_email=new_email,
),
render(
"transactional/verify-mailbox-change.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
mailbox_new_email=new_email,
),
)


@dashboard_bp.route(
"/mailbox/<int:mailbox_id>/cancel_email_change", methods=["GET", "POST"]
)
@login_required
def cancel_mailbox_change_route(mailbox_id):
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))

if mailbox.new_email:
mailbox.new_email = None
Session.commit()
try:
mailbox_utils.cancel_email_change(mailbox_id, current_user)
flash("Your mailbox change is cancelled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
else:
flash("You have no pending mailbox change", "warning")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
except MailboxError as e:
flash(e.msg, "warning")
return redirect(url_for("dashboard.index"))


@dashboard_bp.route("/mailbox/confirm_change")
def mailbox_confirm_change_route():
s = TimestampSigner(MAILBOX_SECRET)
signed_mailbox_id = request.args.get("mailbox_id")
@login_required
@limiter.limit("3/minute")
def mailbox_confirm_email_change_route():
mailbox_id = request.args.get("mailbox_id")

try:
mailbox_id = int(s.unsign(signed_mailbox_id, max_age=900))
except Exception:
flash("Invalid link", "error")
return redirect(url_for("dashboard.index"))
else:
mailbox = Mailbox.get(mailbox_id)

# new_email can be None if user cancels change in the meantime
if mailbox and mailbox.new_email:
user = mailbox.user
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
flash(f"{mailbox.new_email} is already used", "error")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
)

mailbox.email = mailbox.new_email
mailbox.new_email = None

# mark mailbox as verified if the change request is sent from an unverified mailbox
mailbox.verified = True
Session.commit()

LOG.d("Mailbox change %s is verified", mailbox)
flash(f"The {mailbox.email} is updated", "success")
code = request.args.get("code")
if code:
try:
mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
flash("Successfully changed mailbox email", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
)
else:
except mailbox_utils.MailboxError as e:
flash(f"Cannot verify mailbox: {e.msg}", "error")
return redirect(url_for("dashboard.mailbox_route"))
else:
s = TimestampSigner(MAILBOX_SECRET)
try:
mailbox_id = int(s.unsign(mailbox_id, max_age=900))
res = perform_mailbox_email_change(mailbox_id)
flash(res.message, res.message_category)
if res.error:
if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif res.error == MailboxEmailChangeError.InvalidId:
return redirect(url_for("dashboard.index"))
else:
raise Exception("Unhandled MailboxEmailChangeError")
except Exception:
flash("Invalid link", "error")
return redirect(url_for("dashboard.index"))

flash("Successfully changed mailbox email", "success")
return redirect(url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id))
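The legacy branch above calls perform_mailbox_email_change and then dispatches on an error enum rather than raising. A hedged sketch of that result shape; the two names are copied from the imports in this file, but the fields and enum members shown are inferred from the call sites, not from app.mailbox_utils itself:

from dataclasses import dataclass
from enum import Enum
from typing import Optional

class MailboxEmailChangeError(Enum):
    InvalidId = 1
    EmailAlreadyUsed = 2

@dataclass
class MailboxEmailChangeResult:
    error: Optional[MailboxEmailChangeError]  # None on success
    message: str                              # text shown to the user
    message_category: str                     # flash() category, e.g. "success" or "error"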
@@ -43,7 +43,10 @@ def notification_route(notification_id):
def notifications_route():
page = 0
if request.args.get("page"):
page = int(request.args.get("page"))
try:
page = int(request.args.get("page"))
except ValueError:
pass

notifications = (
Notification.filter_by(user_id=current_user.id)
@@ -22,7 +22,7 @@ from app.models import (
PartnerUser,
PartnerSubscription,
)
from app.proton.utils import get_proton_partner
from app.proton.proton_partner import get_proton_partner


@dashboard_bp.route("/pricing", methods=["GET", "POST"])
@@ -13,51 +13,39 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from flask_wtf.file import FileField
from wtforms import StringField, validators
from wtforms.fields.html5 import EmailField

from app import s3, email_utils
from app import s3, user_settings
from app.config import (
URL,
FIRST_ALIAS_DOMAIN,
ALIAS_RANDOM_SUFFIX_LENGTH,
CONNECT_WITH_PROTON,
)
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
)
from app.errors import ProtonPartnerNotSetUp
from app.extensions import limiter
from app.image_validation import detect_image_format, ImageFormat
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import (
BlockBehaviourEnum,
PlanEnum,
File,
ResetPasswordCode,
EmailChange,
User,
Alias,
CustomDomain,
AliasGeneratorEnum,
AliasSuffixEnum,
ManualSubscription,
SenderFormatEnum,
SLDomain,
CoinbaseSubscription,
AppleSubscription,
PartnerUser,
PartnerSubscription,
UnsubscribeBehaviourEnum,
)
from app.proton.utils import get_proton_partner, perform_proton_account_unlink
from app.proton.proton_partner import get_proton_partner
from app.proton.proton_unlink import can_unlink_proton_account
from app.utils import (
random_string,
CSRFValidationForm,
canonicalize_email,
)


@@ -66,12 +54,6 @@ class SettingForm(FlaskForm):
profile_picture = FileField("Profile Picture")


class ChangeEmailForm(FlaskForm):
email = EmailField(
"email", validators=[validators.DataRequired(), validators.Email()]
)


class PromoCodeForm(FlaskForm):
code = StringField("Name", validators=[validators.DataRequired()])

@@ -109,7 +91,6 @@ def get_partner_subscription_and_name(
def setting():
form = SettingForm()
promo_form = PromoCodeForm()
change_email_form = ChangeEmailForm()
csrf_form = CSRFValidationForm()

email_change = EmailChange.get_by(user_id=current_user.id)
@@ -122,63 +103,7 @@ def setting():
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-email":
if change_email_form.validate():
# whether user can proceed with the email update
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:
# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
new_email_valid = False
elif not email_can_be_used_as_mailbox(new_email):
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
# a pending email change with the same email exists from another user
elif EmailChange.get_by(new_email=new_email):
other_email_change: EmailChange = EmailChange.get_by(
new_email=new_email
)
LOG.w(
"Another user has a pending %s with the same email address. Current user:%s",
other_email_change,
current_user,
)

if other_email_change.is_expired():
LOG.d(
"delete the expired email change %s", other_email_change
)
EmailChange.delete(other_email_change.id)
Session.commit()
else:
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False

if new_email_valid:
email_change = EmailChange.create(
user_id=current_user.id,
code=random_string(
60
), # todo: make sure the code is unique
new_email=new_email,
)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash(
"A confirmation email is on the way, please check your inbox",
"success",
)
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-profile":
if form.validate():
profile_updated = False
@@ -222,15 +147,6 @@ def setting():
if profile_updated:
flash("Your profile has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-password":
flash(
"You are going to receive an email containing instructions to change your password",
"success",
)
send_reset_password_email(current_user)
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "notification-preference":
choose = request.form.get("notification")
if choose == "on":
@@ -240,7 +156,6 @@ def setting():
Session.commit()
flash("Your notification preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-alias-generator":
scheme = int(request.form.get("alias-generator-scheme"))
if AliasGeneratorEnum.has_value(scheme):
@@ -248,54 +163,29 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-random-alias-default-domain":
default_domain = request.form.get("random-alias-default-domain")

if default_domain:
sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
if sl_domain:
if sl_domain.premium_only and not current_user.is_premium():
flash("You cannot use this domain", "error")
return redirect(url_for("dashboard.setting"))

current_user.default_alias_public_domain_id = sl_domain.id
current_user.default_alias_custom_domain_id = None
else:
custom_domain = CustomDomain.get_by(domain=default_domain)
if custom_domain:
# sanity check
if (
custom_domain.user_id != current_user.id
or not custom_domain.verified
):
LOG.w(
"%s cannot use domain %s", current_user, custom_domain
)
flash(f"Domain {default_domain} can't be used", "error")
return redirect(request.url)
else:
current_user.default_alias_custom_domain_id = (
custom_domain.id
)
current_user.default_alias_public_domain_id = None

else:
current_user.default_alias_custom_domain_id = None
current_user.default_alias_public_domain_id = None
try:
user_settings.set_default_alias_domain(current_user, default_domain)
except user_settings.CannotSetAlias as e:
flash(e.msg, "error")
return redirect(url_for("dashboard.setting"))

Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "random-alias-suffix":
scheme = int(request.form.get("random-alias-suffix-generator"))
try:
scheme = int(request.form.get("random-alias-suffix-generator"))
except ValueError:
flash("Invalid value", "error")
return redirect(url_for("dashboard.setting"))

if AliasSuffixEnum.has_value(scheme):
current_user.random_alias_suffix = scheme
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-sender-format":
sender_format = int(request.form.get("sender-format"))
if SenderFormatEnum.has_value(sender_format):
@@ -305,7 +195,6 @@ def setting():
flash("Your sender format preference has been updated", "success")
Session.commit()
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "replace-ra":
choose = request.form.get("replace-ra")
if choose == "on":
@@ -315,7 +204,21 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "enable_data_breach_check":
if not current_user.is_premium():
flash("Only premium plan can enable data breach monitoring", "warning")
return redirect(url_for("dashboard.setting"))
choose = request.form.get("enable_data_breach_check")
if choose == "on":
LOG.i("User {current_user} has enabled data breach monitoring")
current_user.enable_data_breach_check = True
flash("Data breach monitoring is enabled", "success")
else:
LOG.i("User {current_user} has disabled data breach monitoring")
current_user.enable_data_breach_check = False
flash("Data breach monitoring is disabled", "info")
Session.commit()
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "sender-in-ra":
choose = request.form.get("enable")
if choose == "on":
@@ -325,7 +228,6 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "expand-alias-info":
choose = request.form.get("enable")
if choose == "on":
@@ -387,14 +289,6 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "send-full-user-report":
if ExportUserDataJob(current_user).store_job_in_db():
flash(
"You will receive your SimpleLogin data via email shortly",
"success",
)
else:
flash("An export of your data is currently in progress", "error")

manual_sub = ManualSubscription.get_by(user_id=current_user.id)
apple_sub = AppleSubscription.get_by(user_id=current_user.id)
@@ -417,7 +311,6 @@ def setting():
SenderFormatEnum=SenderFormatEnum,
BlockBehaviourEnum=BlockBehaviourEnum,
promo_form=promo_form,
change_email_form=change_email_form,
pending_email=pending_email,
AliasGeneratorEnum=AliasGeneratorEnum,
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
@ -431,86 +324,5 @@ def setting():
|
||||
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
connect_with_proton=CONNECT_WITH_PROTON,
|
||||
proton_linked_account=proton_linked_account,
|
||||
can_unlink_proton_account=can_unlink_proton_account(current_user),
|
||||
)
|
||||
|
||||
|
||||
def send_reset_password_email(user):
|
||||
"""
|
||||
generate a new ResetPasswordCode and send it over email to user
|
||||
"""
|
||||
# the activation code is valid for 1h
|
||||
reset_password_code = ResetPasswordCode.create(
|
||||
user_id=user.id, code=random_string(60)
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
|
||||
|
||||
email_utils.send_reset_password_email(user.email, reset_password_link)
|
||||
|
||||
|
||||
def send_change_email_confirmation(user: User, email_change: EmailChange):
|
||||
"""
|
||||
send confirmation email to the new email address
|
||||
"""
|
||||
|
||||
link = f"{URL}/auth/change_email?code={email_change.code}"
|
||||
|
||||
email_utils.send_change_email(email_change.new_email, user.email, link)
|
||||
|
||||
|
||||
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
|
||||
@limiter.limit("5/hour")
|
||||
@login_required
|
||||
def resend_email_change():
|
||||
form = CSRFValidationForm()
|
||||
if not form.validate():
|
||||
flash("Invalid request. Please try again", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
# extend email change expiration
|
||||
email_change.expired = arrow.now().shift(hours=12)
|
||||
Session.commit()
|
||||
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash("A confirmation email is on the way, please check your inbox", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def cancel_email_change():
|
||||
form = CSRFValidationForm()
|
||||
if not form.validate():
|
||||
flash("Invalid request. Please try again", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
EmailChange.delete(email_change.id)
|
||||
Session.commit()
|
||||
flash("Your email change is cancelled", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
|
||||
@login_required
|
||||
def unlink_proton_account():
|
||||
csrf_form = CSRFValidationForm()
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
perform_proton_account_unlink(current_user)
|
||||
flash("Your Proton account has been unlinked", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
@ -11,6 +11,7 @@ from app.dashboard.base import dashboard_bp
|
||||
from app.errors import SubdomainInTrashError
|
||||
from app.log import LOG
|
||||
from app.models import CustomDomain, Mailbox, SLDomain
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
# Only lowercase letters, numbers, dashes (-) are currently supported
|
||||
_SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"
|
||||
@ -102,6 +103,12 @@ def subdomain_route():
|
||||
ownership_verified=True,
|
||||
commit=True,
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.CreateCustomDomain,
|
||||
message=f"Create subdomain {new_custom_domain.id} ({full_domain})",
|
||||
commit=True,
|
||||
)
|
||||
except SubdomainInTrashError:
|
||||
flash(
|
||||
f"{full_domain} has been used before and cannot be reused",
|
||||
|
@ -8,6 +8,7 @@ from app.db import Session
|
||||
from flask import redirect, url_for, flash, request, render_template
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import alias_utils
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.handler.unsubscribe_encoder import UnsubscribeAction
|
||||
from app.handler.unsubscribe_handler import UnsubscribeHandler
|
||||
@ -31,7 +32,9 @@ def unsubscribe(alias_id):
|
||||
|
||||
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
|
||||
if request.method == "POST":
|
||||
alias.enabled = False
|
||||
alias_utils.change_alias_status(
|
||||
alias, enabled=False, message="Set enabled=False from unsubscribe request"
|
||||
)
|
||||
flash(f"Alias {alias.email} has been blocked", "success")
|
||||
Session.commit()
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
from io import BytesIO
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from flask import request, render_template, redirect, url_for, flash
|
||||
from flask_login import current_user, login_required
|
||||
@ -11,6 +12,7 @@ from app.config import ADMIN_EMAIL
|
||||
from app.db import Session
|
||||
from app.developer.base import developer_bp
|
||||
from app.email_utils import send_email
|
||||
from app.image_validation import detect_image_format, ImageFormat
|
||||
from app.log import LOG
|
||||
from app.models import Client, RedirectUri, File, Referral
|
||||
from app.utils import random_string
|
||||
@ -46,16 +48,25 @@ def client_detail(client_id):
|
||||
approval_form.description.data = client.description
|
||||
|
||||
if action == "edit" and form.validate_on_submit():
|
||||
parsed_url = urlparse(form.url.data)
|
||||
if parsed_url.scheme != "https":
|
||||
flash("Only https urls are allowed", "error")
|
||||
return redirect(url_for("developer.index"))
|
||||
client.name = form.name.data
|
||||
client.home_url = form.url.data
|
||||
|
||||
if form.icon.data:
|
||||
# todo: remove current icon if any
|
||||
# todo: handle remove icon
|
||||
icon_data = form.icon.data.read(10240)
|
||||
if detect_image_format(icon_data) == ImageFormat.Unknown:
|
||||
flash("Unknown file format", "warning")
|
||||
return redirect(url_for("developer.index"))
|
||||
if client.icon:
|
||||
s3.delete(client.icon_id)
|
||||
File.delete(client.icon)
|
||||
file_path = random_string(30)
|
||||
file = File.create(path=file_path, user_id=client.user_id)
|
||||
|
||||
s3.upload_from_bytesio(file_path, BytesIO(form.icon.data.read()))
|
||||
s3.upload_from_bytesio(file_path, BytesIO(icon_data))
|
||||
|
||||
Session.flush()
|
||||
LOG.d("upload file %s to s3", file)
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""List of clients"""
|
||||
|
||||
from flask import render_template
|
||||
from flask_login import current_user, login_required
|
||||
|
||||
|
@ -1,3 +1,5 @@
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from flask import render_template, redirect, url_for, flash
|
||||
from flask_login import current_user, login_required
|
||||
from flask_wtf import FlaskForm
|
||||
@ -20,6 +22,10 @@ def new_client():
|
||||
|
||||
if form.validate_on_submit():
|
||||
client = Client.create_new(form.name.data, current_user.id)
|
||||
parsed_url = urlparse(form.url.data)
|
||||
if parsed_url.scheme != "https":
|
||||
flash("Only https urls are allowed", "error")
|
||||
return redirect(url_for("developer.new_client"))
|
||||
client.home_url = form.url.data
|
||||
Session.commit()
|
||||
|
||||
|
@ -1,120 +1,134 @@
|
||||
from app import config
|
||||
from typing import Optional, List, Tuple
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List, Optional
|
||||
|
||||
import dns.resolver
|
||||
|
||||
|
||||
def _get_dns_resolver():
|
||||
my_resolver = dns.resolver.Resolver()
|
||||
my_resolver.nameservers = config.NAMESERVERS
|
||||
|
||||
return my_resolver
|
||||
|
||||
|
||||
def get_ns(hostname) -> [str]:
|
||||
try:
|
||||
answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
|
||||
except Exception:
|
||||
return []
|
||||
return [a.to_text() for a in answers]
|
||||
|
||||
|
||||
def get_cname_record(hostname) -> Optional[str]:
|
||||
"""Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
|
||||
try:
|
||||
answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
for a in answers:
|
||||
ret = a.to_text()
|
||||
return ret[:-1]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_mx_domains(hostname) -> [(int, str)]:
|
||||
"""return list of (priority, domain name) sorted by priority (lowest priority first)
|
||||
domain name ends with a "." at the end.
|
||||
"""
|
||||
try:
|
||||
answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
ret = []
|
||||
|
||||
for a in answers:
|
||||
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
|
||||
parts = record.split(" ")
|
||||
|
||||
ret.append((int(parts[0]), parts[1]))
|
||||
|
||||
return sorted(ret, key=lambda prio_domain: prio_domain[0])
|
||||
|
||||
from app.config import NAMESERVERS
|
||||
|
||||
_include_spf = "include:"
|
||||
|
||||
|
||||
def get_spf_domain(hostname) -> [str]:
|
||||
"""return all domains listed in *include:*"""
|
||||
try:
|
||||
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
|
||||
except Exception:
|
||||
return []
|
||||
class DNSClient(ABC):
|
||||
@abstractmethod
|
||||
def get_cname_record(self, hostname: str) -> Optional[str]:
|
||||
pass
|
||||
|
||||
ret = []
|
||||
@abstractmethod
|
||||
def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
|
||||
pass
|
||||
|
||||
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
|
||||
for record in a.strings:
|
||||
record = record.decode() # record is bytes
|
||||
def get_spf_domain(self, hostname: str) -> List[str]:
|
||||
"""
|
||||
return all domains listed in *include:*
|
||||
"""
|
||||
try:
|
||||
records = self.get_txt_record(hostname)
|
||||
ret = []
|
||||
for record in records:
|
||||
if record.startswith("v=spf1"):
|
||||
parts = record.split(" ")
|
||||
for part in parts:
|
||||
if part.startswith(_include_spf):
|
||||
ret.append(
|
||||
part[part.find(_include_spf) + len(_include_spf) :]
|
||||
)
|
||||
return ret
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
if record.startswith("v=spf1"):
|
||||
@abstractmethod
|
||||
def get_txt_record(self, hostname: str) -> List[str]:
|
||||
pass
|
||||
|
||||
|
||||
class NetworkDNSClient(DNSClient):
|
||||
def __init__(self, nameservers: List[str]):
|
||||
self._resolver = dns.resolver.Resolver()
|
||||
self._resolver.nameservers = nameservers
|
||||
|
||||
def get_cname_record(self, hostname: str) -> Optional[str]:
|
||||
"""
|
||||
Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end
|
||||
"""
|
||||
try:
|
||||
answers = self._resolver.resolve(hostname, "CNAME", search=True)
|
||||
for a in answers:
|
||||
ret = a.to_text()
|
||||
return ret[:-1]
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
|
||||
"""
|
||||
return list of (priority, domain name) sorted by priority (lowest priority first)
|
||||
domain name ends with a "." at the end.
|
||||
"""
|
||||
ret = {}
|
||||
try:
|
||||
answers = self._resolver.resolve(hostname, "MX", search=True)
|
||||
for a in answers:
|
||||
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
|
||||
parts = record.split(" ")
|
||||
for part in parts:
|
||||
if part.startswith(_include_spf):
|
||||
ret.append(part[part.find(_include_spf) + len(_include_spf) :])
|
||||
prio = int(parts[0])
|
||||
if prio not in ret:
|
||||
ret[prio] = []
|
||||
ret[prio].append(parts[1])
|
||||
except Exception:
|
||||
pass
|
||||
return ret
|
||||
|
||||
return ret
|
||||
def get_txt_record(self, hostname: str) -> List[str]:
|
||||
try:
|
||||
answers = self._resolver.resolve(hostname, "TXT", search=False)
|
||||
ret = []
|
||||
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
|
||||
for record in a.strings:
|
||||
ret.append(record.decode())
|
||||
return ret
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
|
||||
def get_txt_record(hostname) -> [str]:
|
||||
try:
|
||||
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
|
||||
except Exception:
|
||||
return []
|
||||
class InMemoryDNSClient(DNSClient):
|
||||
def __init__(self):
|
||||
self.cname_records: dict[str, Optional[str]] = {}
|
||||
self.mx_records: dict[str, dict[int, list[str]]] = {}
|
||||
self.spf_records: dict[str, List[str]] = {}
|
||||
self.txt_records: dict[str, List[str]] = {}
|
||||
|
||||
ret = []
|
||||
def set_cname_record(self, hostname: str, cname: str):
|
||||
self.cname_records[hostname] = cname
|
||||
|
||||
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
|
||||
for record in a.strings:
|
||||
record = record.decode() # record is bytes
|
||||
def set_mx_records(self, hostname: str, mx_list: dict[int, list[str]]):
|
||||
self.mx_records[hostname] = mx_list
|
||||
|
||||
ret.append(record)
|
||||
def set_txt_record(self, hostname: str, txt_list: List[str]):
|
||||
self.txt_records[hostname] = txt_list
|
||||
|
||||
return ret
|
||||
def get_cname_record(self, hostname: str) -> Optional[str]:
|
||||
return self.cname_records.get(hostname)
|
||||
|
||||
def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
|
||||
return self.mx_records.get(hostname, {})
|
||||
|
||||
def get_txt_record(self, hostname: str) -> List[str]:
|
||||
return self.txt_records.get(hostname, [])
|
||||
|
||||
|
||||
def is_mx_equivalent(
|
||||
mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
|
||||
) -> bool:
|
||||
"""
|
||||
Compare mx_domains with ref_mx_domains to see if they are equivalent.
|
||||
mx_domains and ref_mx_domains are list of (priority, domain)
|
||||
global_dns_client: Optional[DNSClient] = None
|
||||
|
||||
The priority order is taken into account but not the priority number.
|
||||
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
|
||||
"""
|
||||
mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
|
||||
ref_mx_domains = sorted(
|
||||
ref_mx_domains, key=lambda priority_domain: priority_domain[0]
|
||||
)
|
||||
|
||||
if len(mx_domains) < len(ref_mx_domains):
|
||||
return False
|
||||
def get_network_dns_client() -> DNSClient:
|
||||
global global_dns_client
|
||||
if global_dns_client is not None:
|
||||
return global_dns_client
|
||||
return NetworkDNSClient(NAMESERVERS)
|
||||
|
||||
for i in range(0, len(ref_mx_domains)):
|
||||
if mx_domains[i][1] != ref_mx_domains[i][1]:
|
||||
return False
|
||||
|
||||
return True
|
||||
def set_global_dns_client(dns_client: Optional[DNSClient]):
|
||||
global global_dns_client
|
||||
global_dns_client = dns_client
|
||||
|
||||
|
||||
def get_mx_domains(hostname: str) -> dict[int, list[str]]:
|
||||
return get_network_dns_client().get_mx_domains(hostname)
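A minimal usage sketch of the new DNS abstraction, assuming these helpers live in app.dns_utils; the hostname and MX values below are made up:
# hedged sketch: swap in the in-memory client so no network lookup happens
from app.dns_utils import (  # assumed module path
    InMemoryDNSClient,
    get_mx_domains,
    set_global_dns_client,
)

fake_dns = InMemoryDNSClient()
fake_dns.set_mx_records("example.com", {10: ["mx1.example.com."], 20: ["mx2.example.com."]})
set_global_dns_client(fake_dns)

assert get_mx_domains("example.com") == {10: ["mx1.example.com."], 20: ["mx2.example.com."]}
set_global_dns_client(None)  # fall back to the real NetworkDNSClient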
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Email headers"""
|
||||
|
||||
MESSAGE_ID = "Message-ID"
|
||||
IN_REPLY_TO = "In-Reply-To"
|
||||
REFERENCES = "References"
|
||||
@ -21,6 +22,7 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
|
||||
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
|
||||
RETURN_PATH = "Return-Path"
|
||||
AUTHENTICATION_RESULTS = "Authentication-Results"
|
||||
SL_QUEUE_ID = "X-SL-Queue-Id"
|
||||
|
||||
# headers used to DKIM sign in order of preference
|
||||
DKIM_HEADERS = [
|
||||
|
@ -33,6 +33,7 @@ from flanker.addresslib import address
|
||||
from flanker.addresslib.address import EmailAddress
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from sqlalchemy import func
|
||||
from flask_login import current_user
|
||||
|
||||
from app import config
|
||||
from app.db import Session
|
||||
@ -68,17 +69,27 @@ VERP_TIME_START = 1640995200
|
||||
VERP_HMAC_ALGO = "sha3-224"
|
||||
|
||||
|
||||
def render(template_name, **kwargs) -> str:
|
||||
def render(template_name: str, user: Optional[User], **kwargs) -> str:
|
||||
templates_dir = os.path.join(config.ROOT_DIR, "templates", "emails")
|
||||
env = Environment(loader=FileSystemLoader(templates_dir))
|
||||
|
||||
template = env.get_template(template_name)
|
||||
|
||||
if user is None:
|
||||
if current_user and current_user.is_authenticated:
|
||||
user = current_user
|
||||
|
||||
use_partner_template = False
|
||||
if user:
|
||||
use_partner_template = user.has_used_alias_from_partner()
|
||||
kwargs["user"] = user
|
||||
|
||||
return template.render(
|
||||
MAX_NB_EMAIL_FREE_PLAN=config.MAX_NB_EMAIL_FREE_PLAN,
|
||||
URL=config.URL,
|
||||
LANDING_PAGE_URL=config.LANDING_PAGE_URL,
|
||||
YEAR=arrow.now().year,
|
||||
USE_PARTNER_TEMPLATE=use_partner_template,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
@ -111,53 +122,59 @@ def send_trial_end_soon_email(user):
|
||||
)
|
||||
|
||||
|
||||
def send_activation_email(email, activation_link):
|
||||
def send_activation_email(user: User, activation_link):
|
||||
send_email(
|
||||
email,
|
||||
user.email,
|
||||
"Just one more step to join SimpleLogin",
|
||||
render(
|
||||
"transactional/activation.txt",
|
||||
user=user,
|
||||
activation_link=activation_link,
|
||||
email=email,
|
||||
email=user.email,
|
||||
),
|
||||
render(
|
||||
"transactional/activation.html",
|
||||
user=user,
|
||||
activation_link=activation_link,
|
||||
email=email,
|
||||
email=user.email,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def send_reset_password_email(email, reset_password_link):
|
||||
def send_reset_password_email(user: User, reset_password_link):
|
||||
send_email(
|
||||
email,
|
||||
user.email,
|
||||
"Reset your password on SimpleLogin",
|
||||
render(
|
||||
"transactional/reset-password.txt",
|
||||
user=user,
|
||||
reset_password_link=reset_password_link,
|
||||
),
|
||||
render(
|
||||
"transactional/reset-password.html",
|
||||
user=user,
|
||||
reset_password_link=reset_password_link,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def send_change_email(new_email, current_email, link):
|
||||
def send_change_email(user: User, new_email, link):
|
||||
send_email(
|
||||
new_email,
|
||||
"Confirm email update on SimpleLogin",
|
||||
render(
|
||||
"transactional/change-email.txt",
|
||||
user=user,
|
||||
link=link,
|
||||
new_email=new_email,
|
||||
current_email=current_email,
|
||||
current_email=user.email,
|
||||
),
|
||||
render(
|
||||
"transactional/change-email.html",
|
||||
user=user,
|
||||
link=link,
|
||||
new_email=new_email,
|
||||
current_email=current_email,
|
||||
current_email=user.email,
|
||||
),
|
||||
)
|
||||
|
||||
@ -170,28 +187,32 @@ def send_invalid_totp_login_email(user, totp_type):
|
||||
"Unsuccessful attempt to login to your SimpleLogin account",
|
||||
render(
|
||||
"transactional/invalid-totp-login.txt",
|
||||
user=user,
|
||||
type=totp_type,
|
||||
),
|
||||
render(
|
||||
"transactional/invalid-totp-login.html",
|
||||
user=user,
|
||||
type=totp_type,
|
||||
),
|
||||
1,
|
||||
)
|
||||
|
||||
|
||||
def send_test_email_alias(email, name):
|
||||
def send_test_email_alias(user: User, email: str):
|
||||
send_email(
|
||||
email,
|
||||
f"This email is sent to {email}",
|
||||
render(
|
||||
"transactional/test-email.txt",
|
||||
name=name,
|
||||
user=user,
|
||||
name=user.name,
|
||||
alias=email,
|
||||
),
|
||||
render(
|
||||
"transactional/test-email.html",
|
||||
name=name,
|
||||
user=user,
|
||||
name=user.name,
|
||||
alias=email,
|
||||
),
|
||||
)
|
||||
@ -206,11 +227,13 @@ def send_cannot_create_directory_alias(user, alias_address, directory_name):
|
||||
f"Alias {alias_address} cannot be created",
|
||||
render(
|
||||
"transactional/cannot-create-alias-directory.txt",
|
||||
user=user,
|
||||
alias=alias_address,
|
||||
directory=directory_name,
|
||||
),
|
||||
render(
|
||||
"transactional/cannot-create-alias-directory.html",
|
||||
user=user,
|
||||
alias=alias_address,
|
||||
directory=directory_name,
|
||||
),
|
||||
@ -228,11 +251,13 @@ def send_cannot_create_directory_alias_disabled(user, alias_address, directory_n
|
||||
f"Alias {alias_address} cannot be created",
|
||||
render(
|
||||
"transactional/cannot-create-alias-directory-disabled.txt",
|
||||
user=user,
|
||||
alias=alias_address,
|
||||
directory=directory_name,
|
||||
),
|
||||
render(
|
||||
"transactional/cannot-create-alias-directory-disabled.html",
|
||||
user=user,
|
||||
alias=alias_address,
|
||||
directory=directory_name,
|
||||
),
|
||||
@ -248,11 +273,13 @@ def send_cannot_create_domain_alias(user, alias, domain):
|
||||
f"Alias {alias} cannot be created",
|
||||
render(
|
||||
"transactional/cannot-create-alias-domain.txt",
|
||||
user=user,
|
||||
alias=alias,
|
||||
domain=domain,
|
||||
),
|
||||
render(
|
||||
"transactional/cannot-create-alias-domain.html",
|
||||
user=user,
|
||||
alias=alias,
|
||||
domain=domain,
|
||||
),
|
||||
@ -494,9 +521,10 @@ def delete_header(msg: Message, header: str):
|
||||
|
||||
def sanitize_header(msg: Message, header: str):
|
||||
"""remove trailing space and remove linebreak from a header"""
|
||||
header_lowercase = header.lower()
|
||||
for i in reversed(range(len(msg._headers))):
|
||||
header_name = msg._headers[i][0].lower()
|
||||
if header_name == header.lower():
|
||||
if header_name == header_lowercase:
|
||||
# msg._headers[i] is a tuple like ('From', 'hey@google.com')
|
||||
if msg._headers[i][1]:
|
||||
msg._headers[i] = (
|
||||
@ -520,7 +548,9 @@ def can_create_directory_for_address(email_address: str) -> bool:
|
||||
for domain in config.ALIAS_DOMAINS:
|
||||
if email_address.endswith("@" + domain):
|
||||
return True
|
||||
|
||||
LOG.i(
|
||||
f"Cannot create address in directory for {email_address} since it does not belong to a valid directory domain"
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
@ -562,7 +592,7 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
|
||||
|
||||
from app.models import CustomDomain
|
||||
|
||||
if CustomDomain.get_by(domain=domain, verified=True):
|
||||
if CustomDomain.get_by(domain=domain, is_sl_subdomain=True, verified=True):
|
||||
LOG.d("domain %s is a SimpleLogin custom domain", domain)
|
||||
return False
|
||||
|
||||
@ -627,7 +657,11 @@ def get_mx_domain_list(domain) -> [str]:
|
||||
"""
|
||||
priority_domains = get_mx_domains(domain)
|
||||
|
||||
return [d[:-1] for _, d in priority_domains]
|
||||
mx_domains = []
|
||||
for prio in priority_domains:
|
||||
for domain in priority_domains[prio]:
|
||||
mx_domains.append(domain[:-1])
|
||||
return mx_domains
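For example, the new priority-keyed dict flattens into a plain host list like this (values are illustrative):
# get_mx_domains now returns {priority: [hosts]} instead of [(priority, host)]
priority_domains = {10: ["mx1.example.com."], 20: ["mx2.example.com.", "mx3.example.com."]}
mx_domains = []
for prio in priority_domains:
    for domain in priority_domains[prio]:
        mx_domains.append(domain[:-1])  # drop the trailing dot
# mx_domains == ["mx1.example.com", "mx2.example.com", "mx3.example.com"]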
|
||||
|
||||
|
||||
def personal_email_already_used(email_address: str) -> bool:
|
||||
@ -918,10 +952,20 @@ def decode_text(text: str, encoding: EmailEncoding = EmailEncoding.NO) -> str:
|
||||
return text
|
||||
|
||||
|
||||
def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
def add_header(
|
||||
msg: Message, text_header, html_header=None, subject_prefix=None
|
||||
) -> Message:
|
||||
if not html_header:
|
||||
html_header = text_header.replace("\n", "<br>")
|
||||
|
||||
if subject_prefix is not None:
|
||||
subject = msg[headers.SUBJECT]
|
||||
if not subject:
|
||||
msg.add_header(headers.SUBJECT, subject_prefix)
|
||||
else:
|
||||
subject = f"{subject_prefix} {subject}"
|
||||
msg.replace_header(headers.SUBJECT, subject)
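# Hedged illustration with made-up values: if the message already has
#   Subject: "Invoice #123"
# then add_header(msg, text, subject_prefix="[Possible phishing attempt]")
# rewrites it to "[Possible phishing attempt] Invoice #123"; a message with
# no Subject header gets the prefix on its own.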
|
||||
|
||||
content_type = msg.get_content_type().lower()
|
||||
if content_type == "text/plain":
|
||||
encoding = get_encoding(msg)
|
||||
@ -1252,6 +1296,7 @@ def spf_pass(
|
||||
f"SimpleLogin Alert: attempt to send emails from your alias {alias.email} from unknown IP Address",
|
||||
render(
|
||||
"transactional/spf-fail.txt",
|
||||
user=user,
|
||||
alias=alias.email,
|
||||
ip=ip,
|
||||
mailbox_url=config.URL + f"/dashboard/mailbox/{mailbox.id}#spf",
|
||||
@ -1261,6 +1306,7 @@ def spf_pass(
|
||||
),
|
||||
render(
|
||||
"transactional/spf-fail.html",
|
||||
user=user,
|
||||
ip=ip,
|
||||
mailbox_url=config.URL + f"/dashboard/mailbox/{mailbox.id}#spf",
|
||||
to_email=contact_email,
|
||||
@ -1303,17 +1349,18 @@ def get_queue_id(msg: Message) -> Optional[str]:
|
||||
|
||||
received_header = str(msg[headers.RECEIVED])
|
||||
if not received_header:
|
||||
return
|
||||
return None
|
||||
|
||||
# received_header looks like 'from mail-wr1-x434.google.com (mail-wr1-x434.google.com [IPv6:2a00:1450:4864:20::434])\r\n\t(using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits))\r\n\t(No client certificate requested)\r\n\tby mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n\tfor <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)'
|
||||
search_result = re.search("with ESMTPS id [0-9a-zA-Z]{1,}", received_header)
|
||||
if not search_result:
|
||||
return
|
||||
|
||||
# the "with ESMTPS id 4FxQmw1DXdz2vK2" part
|
||||
with_esmtps = received_header[search_result.start() : search_result.end()]
|
||||
|
||||
return with_esmtps[len("with ESMTPS id ") :]
|
||||
search_result = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
|
||||
if search_result:
|
||||
return search_result.group(1)
|
||||
search_result = re.search(
|
||||
r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", received_header
|
||||
)
|
||||
if search_result:
|
||||
return search_result.group(1)
|
||||
return None
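A quick sketch of what the broadened pattern accepts; the Received header below is shortened and illustrative:
import re

received = "by mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2 for <x@alias.com>"
match = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received)
assert match.group(1) == "4FxQmw1DXdz2vK2"
# "with SMTP id ..." and "with ESMTPA id ..." now match as well; the second
# regex covers the bare "(Postfix)\r\n\tid ..." form.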
|
||||
|
||||
|
||||
def should_ignore_bounce(mail_from: str) -> bool:
|
||||
@ -1403,7 +1450,7 @@ def generate_verp_email(
|
||||
# Time is in minutes granularity and start counting on 2022-01-01 to reduce bytes to represent time
|
||||
data = [
|
||||
verp_type.value,
|
||||
object_id,
|
||||
object_id or 0,
|
||||
int((time.time() - VERP_TIME_START) / 60),
|
||||
]
|
||||
json_payload = json.dumps(data).encode("utf-8")
|
||||
|
0
app/app/events/__init__.py
Normal file
95
app/app/events/event_dispatcher.py
Normal file
@ -0,0 +1,95 @@
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
import newrelic.agent
|
||||
|
||||
from app import config
|
||||
from app.db import Session
|
||||
from app.errors import ProtonPartnerNotSetUp
|
||||
from app.events.generated import event_pb2
|
||||
from app.log import LOG
|
||||
from app.models import User, PartnerUser, SyncEvent
|
||||
from app.proton.proton_partner import get_proton_partner
|
||||
from typing import Optional
|
||||
|
||||
NOTIFICATION_CHANNEL = "simplelogin_sync_events"
|
||||
|
||||
|
||||
class Dispatcher(ABC):
|
||||
@abstractmethod
|
||||
def send(self, event: bytes):
|
||||
pass
|
||||
|
||||
|
||||
class PostgresDispatcher(Dispatcher):
|
||||
def send(self, event: bytes):
|
||||
instance = SyncEvent.create(content=event, flush=True)
|
||||
Session.execute(f"NOTIFY {NOTIFICATION_CHANNEL}, '{instance.id}';")
|
||||
|
||||
@staticmethod
|
||||
def get():
|
||||
return PostgresDispatcher()
|
||||
|
||||
|
||||
class GlobalDispatcher:
|
||||
__dispatcher: Optional[Dispatcher] = None
|
||||
|
||||
@staticmethod
|
||||
def get_dispatcher() -> Dispatcher:
|
||||
if not GlobalDispatcher.__dispatcher:
|
||||
GlobalDispatcher.__dispatcher = PostgresDispatcher.get()
|
||||
return GlobalDispatcher.__dispatcher
|
||||
|
||||
@staticmethod
|
||||
def set_dispatcher(dispatcher: Optional[Dispatcher]):
|
||||
GlobalDispatcher.__dispatcher = dispatcher
|
||||
|
||||
|
||||
class EventDispatcher:
|
||||
@staticmethod
|
||||
def send_event(
|
||||
user: User,
|
||||
content: event_pb2.EventContent,
|
||||
dispatcher: Optional[Dispatcher] = None,
|
||||
skip_if_webhook_missing: bool = True,
|
||||
):
|
||||
if dispatcher is None:
|
||||
dispatcher = GlobalDispatcher.get_dispatcher()
|
||||
if config.EVENT_WEBHOOK_DISABLE:
|
||||
LOG.i("Not sending events because webhook is disabled")
|
||||
return
|
||||
|
||||
if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
|
||||
LOG.i(
|
||||
"Not sending events because webhook is not configured and allowed to be empty"
|
||||
)
|
||||
return
|
||||
|
||||
partner_user = EventDispatcher.__partner_user(user.id)
|
||||
if not partner_user:
|
||||
LOG.i(f"Not sending events because there's no partner user for user {user}")
|
||||
return
|
||||
|
||||
event = event_pb2.Event(
|
||||
user_id=user.id,
|
||||
external_user_id=partner_user.external_user_id,
|
||||
partner_id=partner_user.partner_id,
|
||||
content=content,
|
||||
)
|
||||
|
||||
serialized = event.SerializeToString()
|
||||
dispatcher.send(serialized)
|
||||
|
||||
event_type = content.WhichOneof("content")
|
||||
newrelic.agent.record_custom_event("EventStoredToDb", {"type": event_type})
|
||||
LOG.i("Sent event to the dispatcher")
|
||||
|
||||
@staticmethod
|
||||
def __partner_user(user_id: int) -> Optional[PartnerUser]:
|
||||
# Check if the current user has a partner_id
|
||||
try:
|
||||
proton_partner_id = get_proton_partner().id
|
||||
except ProtonPartnerNotSetUp:
|
||||
return None
|
||||
|
||||
# It has. Retrieve the information for the PartnerUser
|
||||
return PartnerUser.get_by(user_id=user_id, partner_id=proton_partner_id)
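A rough usage sketch; the capture-only dispatcher is hypothetical, and user is assumed to be an existing User already linked to the Proton partner:
class MemoryDispatcher(Dispatcher):
    """Hypothetical test double that just records serialized events."""

    def __init__(self):
        self.memory: list[bytes] = []

    def send(self, event: bytes):
        self.memory.append(event)

dispatcher = MemoryDispatcher()
content = event_pb2.EventContent(user_deleted=event_pb2.UserDeleted())
EventDispatcher.send_event(user, content, dispatcher=dispatcher, skip_if_webhook_missing=False)
# dispatcher.memory now holds one serialized event_pb2.Event, provided
# EVENT_WEBHOOK_DISABLE is off and a PartnerUser row exists for the user.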
|
52
app/app/events/generated/event_pb2.py
Normal file
@ -0,0 +1,52 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# NO CHECKED-IN PROTOBUF GENCODE
|
||||
# source: event.proto
|
||||
# Protobuf Python Version: 5.27.0
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import runtime_version as _runtime_version
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf.internal import builder as _builder
|
||||
_runtime_version.ValidateProtobufRuntimeVersion(
|
||||
_runtime_version.Domain.PUBLIC,
|
||||
5,
|
||||
27,
|
||||
0,
|
||||
'',
|
||||
'event.proto'
|
||||
)
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\":\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\x12\x10\n\x08lifetime\x18\x02 \x01(\x08\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x0e\n\x0cUserUnlinked\"\xce\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x12\x39\n\ruser_unlinked\x18\x07 \x01(\x0b\x32 .simplelogin_events.UserUnlinkedH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
|
||||
|
||||
_globals = globals()
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
|
||||
if not _descriptor._USE_C_DESCRIPTORS:
|
||||
DESCRIPTOR._loaded_options = None
|
||||
_globals['_USERPLANCHANGED']._serialized_start=35
|
||||
_globals['_USERPLANCHANGED']._serialized_end=93
|
||||
_globals['_USERDELETED']._serialized_start=95
|
||||
_globals['_USERDELETED']._serialized_end=108
|
||||
_globals['_ALIASCREATED']._serialized_start=110
|
||||
_globals['_ALIASCREATED']._serialized_end=202
|
||||
_globals['_ALIASSTATUSCHANGED']._serialized_start=204
|
||||
_globals['_ALIASSTATUSCHANGED']._serialized_end=288
|
||||
_globals['_ALIASDELETED']._serialized_start=290
|
||||
_globals['_ALIASDELETED']._serialized_end=331
|
||||
_globals['_ALIASCREATEDLIST']._serialized_start=333
|
||||
_globals['_ALIASCREATEDLIST']._serialized_end=401
|
||||
_globals['_USERUNLINKED']._serialized_start=403
|
||||
_globals['_USERUNLINKED']._serialized_end=417
|
||||
_globals['_EVENTCONTENT']._serialized_start=420
|
||||
_globals['_EVENTCONTENT']._serialized_end=882
|
||||
_globals['_EVENT']._serialized_start=884
|
||||
_globals['_EVENT']._serialized_end=1005
|
||||
# @@protoc_insertion_point(module_scope)
|
92
app/app/events/generated/event_pb2.pyi
Normal file
@ -0,0 +1,92 @@
|
||||
from google.protobuf.internal import containers as _containers
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
|
||||
|
||||
DESCRIPTOR: _descriptor.FileDescriptor
|
||||
|
||||
class UserPlanChanged(_message.Message):
|
||||
__slots__ = ("plan_end_time", "lifetime")
|
||||
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
|
||||
LIFETIME_FIELD_NUMBER: _ClassVar[int]
|
||||
plan_end_time: int
|
||||
lifetime: bool
|
||||
def __init__(self, plan_end_time: _Optional[int] = ..., lifetime: bool = ...) -> None: ...
|
||||
|
||||
class UserDeleted(_message.Message):
|
||||
__slots__ = ()
|
||||
def __init__(self) -> None: ...
|
||||
|
||||
class AliasCreated(_message.Message):
|
||||
__slots__ = ("id", "email", "note", "enabled", "created_at")
|
||||
ID_FIELD_NUMBER: _ClassVar[int]
|
||||
EMAIL_FIELD_NUMBER: _ClassVar[int]
|
||||
NOTE_FIELD_NUMBER: _ClassVar[int]
|
||||
ENABLED_FIELD_NUMBER: _ClassVar[int]
|
||||
CREATED_AT_FIELD_NUMBER: _ClassVar[int]
|
||||
id: int
|
||||
email: str
|
||||
note: str
|
||||
enabled: bool
|
||||
created_at: int
|
||||
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., note: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
|
||||
|
||||
class AliasStatusChanged(_message.Message):
|
||||
__slots__ = ("id", "email", "enabled", "created_at")
|
||||
ID_FIELD_NUMBER: _ClassVar[int]
|
||||
EMAIL_FIELD_NUMBER: _ClassVar[int]
|
||||
ENABLED_FIELD_NUMBER: _ClassVar[int]
|
||||
CREATED_AT_FIELD_NUMBER: _ClassVar[int]
|
||||
id: int
|
||||
email: str
|
||||
enabled: bool
|
||||
created_at: int
|
||||
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
|
||||
|
||||
class AliasDeleted(_message.Message):
|
||||
__slots__ = ("id", "email")
|
||||
ID_FIELD_NUMBER: _ClassVar[int]
|
||||
EMAIL_FIELD_NUMBER: _ClassVar[int]
|
||||
id: int
|
||||
email: str
|
||||
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ...) -> None: ...
|
||||
|
||||
class AliasCreatedList(_message.Message):
|
||||
__slots__ = ("events",)
|
||||
EVENTS_FIELD_NUMBER: _ClassVar[int]
|
||||
events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
|
||||
def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
|
||||
|
||||
class UserUnlinked(_message.Message):
|
||||
__slots__ = ()
|
||||
def __init__(self) -> None: ...
|
||||
|
||||
class EventContent(_message.Message):
|
||||
__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list", "user_unlinked")
|
||||
USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
|
||||
USER_DELETED_FIELD_NUMBER: _ClassVar[int]
|
||||
ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
|
||||
ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
|
||||
ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
|
||||
ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
|
||||
USER_UNLINKED_FIELD_NUMBER: _ClassVar[int]
|
||||
user_plan_change: UserPlanChanged
|
||||
user_deleted: UserDeleted
|
||||
alias_created: AliasCreated
|
||||
alias_status_change: AliasStatusChanged
|
||||
alias_deleted: AliasDeleted
|
||||
alias_create_list: AliasCreatedList
|
||||
user_unlinked: UserUnlinked
|
||||
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ..., user_unlinked: _Optional[_Union[UserUnlinked, _Mapping]] = ...) -> None: ...
|
||||
|
||||
class Event(_message.Message):
|
||||
__slots__ = ("user_id", "external_user_id", "partner_id", "content")
|
||||
USER_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
EXTERNAL_USER_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
PARTNER_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
CONTENT_FIELD_NUMBER: _ClassVar[int]
|
||||
user_id: int
|
||||
external_user_id: str
|
||||
partner_id: int
|
||||
content: EventContent
|
||||
def __init__(self, user_id: _Optional[int] = ..., external_user_id: _Optional[str] = ..., partner_id: _Optional[int] = ..., content: _Optional[_Union[EventContent, _Mapping]] = ...) -> None: ...
|
@ -33,8 +33,11 @@ from app.models import (
|
||||
SLDomain,
|
||||
Hibp,
|
||||
AliasHibp,
|
||||
PartnerUser,
|
||||
PartnerSubscription,
|
||||
)
|
||||
from app.pgp_utils import load_public_key
|
||||
from app.proton.proton_partner import get_proton_partner
|
||||
|
||||
|
||||
def fake_data():
|
||||
@ -87,7 +90,7 @@ def fake_data():
|
||||
user_id=user.id,
|
||||
alias_id=alias.id,
|
||||
website_email="hey@google.com",
|
||||
reply_email="rep@sl.local",
|
||||
reply_email="rep@sl.lan",
|
||||
commit=True,
|
||||
)
|
||||
EmailLog.create(
|
||||
@ -163,7 +166,7 @@ def fake_data():
|
||||
# user_id=user.id,
|
||||
# alias_id=a.id,
|
||||
# website_email=f"contact{i}@example.com",
|
||||
# reply_email=f"rep{i}@sl.local",
|
||||
# reply_email=f"rep{i}@sl.lan",
|
||||
# )
|
||||
# Session.commit()
|
||||
# for _ in range(3):
|
||||
@ -269,3 +272,27 @@ def fake_data():
|
||||
CustomDomain.create(
|
||||
user_id=user.id, domain="old.com", verified=True, ownership_verified=True
|
||||
)
|
||||
|
||||
# Create a user
|
||||
proton_partner = get_proton_partner()
|
||||
user = User.create(
|
||||
email="test@proton.me",
|
||||
name="Proton test",
|
||||
password="password",
|
||||
activated=True,
|
||||
is_admin=False,
|
||||
intro_shown=True,
|
||||
from_partner=True,
|
||||
flush=True,
|
||||
)
|
||||
pu = PartnerUser.create(
|
||||
user_id=user.id,
|
||||
partner_id=proton_partner.id,
|
||||
partner_email="test@proton.me",
|
||||
external_user_id="DUMMY",
|
||||
flush=True,
|
||||
)
|
||||
PartnerSubscription.create(
|
||||
partner_user_id=pu.id, end_at=arrow.now().shift(years=1, days=1)
|
||||
)
|
||||
Session.commit()
|
||||
|
@ -30,7 +30,9 @@ def apply_dmarc_policy_for_forward_phase(
|
||||
) -> Tuple[Message, Optional[str]]:
|
||||
spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
|
||||
if not DMARC_CHECK_ENABLED or not spam_result:
|
||||
LOG.i("DMARC check disabled")
|
||||
return msg, None
|
||||
LOG.i(f"Spam check result in {spam_result}")
|
||||
|
||||
from_header = get_header_unicode(msg[headers.FROM])
|
||||
|
||||
@ -62,6 +64,7 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
|
||||
msg,
|
||||
warning_plain_text,
|
||||
warning_html,
|
||||
subject_prefix="[Possible phishing attempt]",
|
||||
)
|
||||
return changed_msg, None
|
||||
|
||||
@ -74,6 +77,7 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
|
||||
msg,
|
||||
warning_plain_text,
|
||||
warning_html,
|
||||
subject_prefix="[Possible phishing attempt]",
|
||||
)
|
||||
return changed_msg, None
|
||||
|
||||
@ -102,12 +106,14 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
|
||||
f"An email sent to {alias.email} has been quarantined",
|
||||
render(
|
||||
"transactional/message-quarantine-dmarc.txt.jinja2",
|
||||
user=user,
|
||||
from_header=from_header,
|
||||
alias=alias,
|
||||
refused_email_url=email_log.get_dashboard_url(),
|
||||
),
|
||||
render(
|
||||
"transactional/message-quarantine-dmarc.html",
|
||||
user=user,
|
||||
from_header=from_header,
|
||||
alias=alias,
|
||||
refused_email_url=email_log.get_dashboard_url(),
|
||||
@ -150,8 +156,10 @@ def apply_dmarc_policy_for_reply_phase(
|
||||
) -> Optional[str]:
|
||||
spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
|
||||
if not DMARC_CHECK_ENABLED or not spam_result:
|
||||
LOG.i("DMARC check disabled")
|
||||
return None
|
||||
|
||||
LOG.i(f"Spam check result is {spam_result}")
|
||||
if spam_result.dmarc not in (
|
||||
DmarcCheckResult.quarantine,
|
||||
DmarcCheckResult.reject,
|
||||
@ -170,12 +178,14 @@ def apply_dmarc_policy_for_reply_phase(
|
||||
f"Attempt to send an email to your contact {contact_recipient.email} from {envelope.mail_from}",
|
||||
render(
|
||||
"transactional/spoof-reply.txt.jinja2",
|
||||
user=alias_from.user,
|
||||
contact=contact_recipient,
|
||||
alias=alias_from,
|
||||
sender=envelope.mail_from,
|
||||
),
|
||||
render(
|
||||
"transactional/spoof-reply.html",
|
||||
user=alias_from.user,
|
||||
contact=contact_recipient,
|
||||
alias=alias_from,
|
||||
sender=envelope.mail_from,
|
||||
|
@ -319,11 +319,13 @@ def report_complaint_to_user_in_forward_phase(
|
||||
f"Abuse report from {capitalized_name}",
|
||||
render(
|
||||
"transactional/provider-complaint-forward-phase.txt.jinja2",
|
||||
user=user,
|
||||
email=mailbox_email,
|
||||
provider=capitalized_name,
|
||||
),
|
||||
render(
|
||||
"transactional/provider-complaint-forward-phase.html",
|
||||
user=user,
|
||||
email=mailbox_email,
|
||||
provider=capitalized_name,
|
||||
),
|
||||
|
@ -2,6 +2,7 @@ import urllib
|
||||
from email.header import Header
|
||||
from email.message import Message
|
||||
|
||||
from app import config
|
||||
from app.email import headers
|
||||
from app.email_utils import add_or_replace_header, delete_header
|
||||
from app.handler.unsubscribe_encoder import (
|
||||
@ -45,8 +46,17 @@ class UnsubscribeGenerator:
|
||||
if start == -1 or end == -1 or start >= end:
|
||||
continue
|
||||
method = raw_method[start + 1 : end]
|
||||
url_data = urllib.parse.urlparse(method)
|
||||
try:
|
||||
url_data = urllib.parse.urlparse(method)
|
||||
except ValueError:
|
||||
LOG.debug(f"Unsub has invalid method {method}. Ignoring.")
|
||||
continue
|
||||
if url_data.scheme == "mailto":
|
||||
if url_data.path == config.UNSUBSCRIBER:
|
||||
LOG.debug(
|
||||
f"Skipping replacing unsubscribe since the original email already points to {config.UNSUBSCRIBER}"
|
||||
)
|
||||
return message
|
||||
query_data = urllib.parse.parse_qs(url_data.query)
|
||||
mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
|
||||
LOG.debug(f"Unsub is mailto to {mailto_unsubs}")
|
||||
|
@ -5,6 +5,7 @@ from typing import Optional
|
||||
from aiosmtpd.smtp import Envelope
|
||||
|
||||
from app import config
|
||||
from app import alias_utils
|
||||
from app.db import Session
|
||||
from app.email import headers, status
|
||||
from app.email_utils import (
|
||||
@ -101,7 +102,10 @@ class UnsubscribeHandler:
|
||||
mailbox.email, alias
|
||||
):
|
||||
return status.E509
|
||||
alias.enabled = False
|
||||
LOG.i(f"User disabled alias {alias} via unsubscribe header")
|
||||
alias_utils.change_alias_status(
|
||||
alias, enabled=False, message="Set enabled=False via unsubscribe header"
|
||||
)
|
||||
Session.commit()
|
||||
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
|
||||
for mailbox in alias.mailboxes:
|
||||
|
@ -30,7 +30,10 @@ def handle_batch_import(batch_import: BatchImport):
|
||||
|
||||
LOG.d("Download file %s from %s", batch_import.file, file_url)
|
||||
r = requests.get(file_url)
|
||||
lines = [line.decode("utf-8") for line in r.iter_lines()]
|
||||
# Replace invisible character
|
||||
lines = [
|
||||
line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
|
||||
]
|
||||
|
||||
import_from_csv(batch_import, user, lines)
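For example, a leading UTF-8 BOM and stray whitespace are stripped per line (the bytes are illustrative):
raw_lines = [b"\xef\xbb\xbfalias,note\r", b"hello@example.com,demo\r"]
lines = [line.decode("utf-8").replace("\ufeff", "").strip() for line in raw_lines]
# lines == ["alias,note", "hello@example.com,demo"]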
|
||||
|
||||
|
52
app/app/jobs/event_jobs.py
Normal file
@ -0,0 +1,52 @@
|
||||
import newrelic.agent
|
||||
|
||||
from app.events.event_dispatcher import EventDispatcher, Dispatcher
|
||||
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
|
||||
from app.log import LOG
|
||||
from app.models import User, Alias
|
||||
|
||||
|
||||
def send_alias_creation_events_for_user(
|
||||
user: User, dispatcher: Dispatcher, chunk_size=50
|
||||
):
|
||||
if user.disabled:
|
||||
LOG.i("User {user} is disabled. Skipping sending events for that user")
|
||||
return
|
||||
chunk_size = min(chunk_size, 50)
|
||||
event_list = []
|
||||
LOG.i("Sending alias create events for user {user}")
|
||||
for alias in (
|
||||
Alias.yield_per_query(chunk_size)
|
||||
.filter_by(user_id=user.id)
|
||||
.order_by(Alias.id.asc())
|
||||
):
|
||||
event_list.append(
|
||||
AliasCreated(
|
||||
id=alias.id,
|
||||
email=alias.email,
|
||||
note=alias.note,
|
||||
enabled=alias.enabled,
|
||||
created_at=int(alias.created_at.timestamp),
|
||||
)
|
||||
)
|
||||
if len(event_list) >= chunk_size:
|
||||
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
|
||||
EventDispatcher.send_event(
|
||||
user,
|
||||
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
|
||||
dispatcher=dispatcher,
|
||||
)
|
||||
newrelic.agent.record_custom_metric(
|
||||
"Custom/event_alias_created_event", len(event_list)
|
||||
)
|
||||
event_list = []
|
||||
if len(event_list) > 0:
|
||||
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
|
||||
EventDispatcher.send_event(
|
||||
user,
|
||||
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
|
||||
dispatcher=dispatcher,
|
||||
)
|
||||
newrelic.agent.record_custom_metric(
|
||||
"Custom/event_alias_created_event", len(event_list)
|
||||
)
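A brief usage sketch; the dispatcher choice and chunk size are illustrative:
from app.events.event_dispatcher import GlobalDispatcher

send_alias_creation_events_for_user(
    user, dispatcher=GlobalDispatcher.get_dispatcher(), chunk_size=25
)
# aliases are streamed oldest-first and batched into AliasCreatedList
# messages of at most chunk_size entries (hard-capped at 50).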
|
@ -12,6 +12,7 @@ import arrow
|
||||
import sqlalchemy
|
||||
|
||||
from app import config
|
||||
from app.constants import JobType
|
||||
from app.db import Session
|
||||
from app.email import headers
|
||||
from app.email_utils import (
|
||||
@ -137,7 +138,9 @@ class ExportUserDataJob:
|
||||
msg[headers.SUBJECT] = "Your SimpleLogin data"
|
||||
msg[headers.FROM] = f'"SimpleLogin (noreply)" <{config.NOREPLY}>'
|
||||
msg[headers.TO] = to_email
|
||||
msg.attach(MIMEText(render("transactional/user-report.html"), "html"))
|
||||
msg.attach(
|
||||
MIMEText(render("transactional/user-report.html", user=self._user), "html")
|
||||
)
|
||||
attachment = MIMEApplication(zipped_contents.read())
|
||||
attachment.add_header(
|
||||
"Content-Disposition", "attachment", filename="user_report.zip"
|
||||
@ -172,7 +175,7 @@ class ExportUserDataJob:
|
||||
jobs_in_db = (
|
||||
Session.query(Job)
|
||||
.filter(
|
||||
Job.name == config.JOB_SEND_USER_REPORT,
|
||||
Job.name == JobType.SEND_USER_REPORT.value,
|
||||
Job.payload.op("->")("user_id").cast(sqlalchemy.TEXT)
|
||||
== str(self._user.id),
|
||||
Job.taken.is_(False),
|
||||
@ -182,7 +185,7 @@ class ExportUserDataJob:
|
||||
if jobs_in_db > 0:
|
||||
return None
|
||||
return Job.create(
|
||||
name=config.JOB_SEND_USER_REPORT,
|
||||
name=JobType.SEND_USER_REPORT.value,
|
||||
payload={"user_id": self._user.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
|
72
app/app/jobs/send_event_job.py
Normal file
@ -0,0 +1,72 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from typing import Optional
|
||||
|
||||
import arrow
|
||||
|
||||
from app.constants import JobType
|
||||
from app.errors import ProtonPartnerNotSetUp
|
||||
from app.events.generated import event_pb2
|
||||
from app.events.generated.event_pb2 import EventContent
|
||||
from app.models import (
|
||||
User,
|
||||
Job,
|
||||
PartnerUser,
|
||||
)
|
||||
from app.proton.proton_partner import get_proton_partner
|
||||
from events.event_sink import EventSink
|
||||
|
||||
|
||||
class SendEventToWebhookJob:
|
||||
def __init__(self, user: User, event: EventContent):
|
||||
self._user: User = user
|
||||
self._event: EventContent = event
|
||||
|
||||
def run(self, sink: EventSink) -> bool:
|
||||
# Check if the current user has a partner_id
|
||||
try:
|
||||
proton_partner_id = get_proton_partner().id
|
||||
except ProtonPartnerNotSetUp:
|
||||
return False
|
||||
|
||||
# It has. Retrieve the information for the PartnerUser
|
||||
partner_user = PartnerUser.get_by(
|
||||
user_id=self._user.id, partner_id=proton_partner_id
|
||||
)
|
||||
if partner_user is None:
|
||||
return True
|
||||
event = event_pb2.Event(
|
||||
user_id=self._user.id,
|
||||
external_user_id=partner_user.external_user_id,
|
||||
partner_id=partner_user.partner_id,
|
||||
content=self._event,
|
||||
)
|
||||
|
||||
serialized = event.SerializeToString()
|
||||
return sink.send_data_to_webhook(serialized)
|
||||
|
||||
@staticmethod
|
||||
def create_from_job(job: Job) -> Optional[SendEventToWebhookJob]:
|
||||
user = User.get(job.payload["user_id"])
|
||||
if not user:
|
||||
return None
|
||||
event_data = base64.b64decode(job.payload["event"])
|
||||
event = event_pb2.EventContent()
|
||||
event.ParseFromString(event_data)
|
||||
|
||||
return SendEventToWebhookJob(user=user, event=event)
|
||||
|
||||
def store_job_in_db(
|
||||
self, run_at: Optional[arrow.Arrow], commit: bool = True
|
||||
) -> Job:
|
||||
stub = self._event.SerializeToString()
|
||||
return Job.create(
|
||||
name=JobType.SEND_EVENT_TO_WEBHOOK.value,
|
||||
payload={
|
||||
"user_id": self._user.id,
|
||||
"event": base64.b64encode(stub).decode("utf-8"),
|
||||
},
|
||||
run_at=run_at if run_at is not None else arrow.now(),
|
||||
commit=commit,
|
||||
)
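A hedged round-trip sketch, assuming user is an existing User row:
content = EventContent(user_unlinked=event_pb2.UserUnlinked())
job = SendEventToWebhookJob(user=user, event=content).store_job_in_db(run_at=None)
restored = SendEventToWebhookJob.create_from_job(job)
# restored carries the same user and an EventContent parsed back from the
# base64 payload stored on the Job row.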
|
@ -10,7 +10,7 @@ from app.config import (
|
||||
|
||||
# this format allows clickable link to code source in PyCharm
|
||||
_log_format = (
|
||||
"%(asctime)s - %(name)s - %(levelname)s - %(process)d - "
|
||||
"%(asctime)s - %(name)s - %(levelname)s - %(process)d - %(request_id)s"
|
||||
'"%(pathname)s:%(lineno)d" - %(funcName)s() - %(message_id)s - %(message)s'
|
||||
)
|
||||
_log_formatter = logging.Formatter(_log_format)
|
||||
@ -37,6 +37,21 @@ class EmailHandlerFilter(logging.Filter):
|
||||
return _MESSAGE_ID
|
||||
|
||||
|
||||
class RequestIdFilter(logging.Filter):
|
||||
"""automatically add request-id to keep track of a request"""
|
||||
|
||||
def filter(self, record):
|
||||
from flask import g, has_request_context
|
||||
|
||||
request_id = ""
|
||||
if has_request_context() and hasattr(g, "request_id"):
|
||||
ctx_request_id = getattr(g, "request_id")
|
||||
if ctx_request_id:
|
||||
request_id = f"{ctx_request_id} - "
|
||||
record.request_id = request_id
|
||||
return True
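The filter only reads g.request_id; something upstream still has to set it, for instance a before_request hook along these lines (hypothetical wiring, not part of this change):
import uuid
from flask import Flask, g, request

app = Flask(__name__)  # stand-in for the real application object

@app.before_request
def set_request_id():
    g.request_id = request.headers.get("X-Request-Id") or uuid.uuid4().hex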
|
||||
|
||||
|
||||
def _get_console_handler():
|
||||
console_handler = logging.StreamHandler(sys.stdout)
|
||||
console_handler.setFormatter(_log_formatter)
|
||||
@ -54,6 +69,7 @@ def _get_logger(name) -> logging.Logger:
|
||||
logger.addHandler(_get_console_handler())
|
||||
|
||||
logger.addFilter(EmailHandlerFilter())
|
||||
logger.addFilter(RequestIdFilter())
|
||||
|
||||
# no propagation to avoid propagating to root logger
|
||||
logger.propagate = False
|
||||
|
@ -76,7 +76,6 @@ class SendRequest:
|
||||
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
|
||||
self.save_request_to_file(file_path)
|
||||
|
||||
@staticmethod
|
||||
def save_request_to_failed_dir(self, prefix: str = "DeliveryRetryFail"):
|
||||
file_name = (
|
||||
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
|
510
app/app/mailbox_utils.py
Normal file
@ -0,0 +1,510 @@
|
||||
import dataclasses
|
||||
import secrets
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
import arrow
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from app import config
|
||||
from app.constants import JobType
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
mailbox_already_used,
|
||||
email_can_be_used_as_mailbox,
|
||||
send_email,
|
||||
render,
|
||||
get_email_domain_part,
|
||||
)
|
||||
from app.email_validation import is_valid_email
|
||||
from app.log import LOG
|
||||
from app.models import User, Mailbox, Job, MailboxActivation, Alias
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import canonicalize_email, sanitize_email
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class CreateMailboxOutput:
|
||||
mailbox: Mailbox
|
||||
activation: Optional[MailboxActivation]
|
||||
|
||||
|
||||
class MailboxError(Exception):
|
||||
def __init__(self, msg: str):
|
||||
self.msg = msg
|
||||
|
||||
|
||||
class OnlyPaidError(MailboxError):
|
||||
def __init__(self):
|
||||
self.msg = "Only available for paid plans"
|
||||
|
||||
|
||||
class CannotVerifyError(MailboxError):
|
||||
def __init__(self, msg: str, deleted_activation_code: bool = False):
|
||||
self.msg = msg
|
||||
self.deleted_activation_code = deleted_activation_code
|
||||
|
||||
|
||||
MAX_ACTIVATION_TRIES = 3
|
||||
|
||||
|
||||
def create_mailbox(
|
||||
user: User,
|
||||
email: str,
|
||||
verified: bool = False,
|
||||
send_email: bool = True,
|
||||
use_digit_codes: bool = False,
|
||||
send_link: bool = True,
|
||||
) -> CreateMailboxOutput:
|
||||
email = sanitize_email(email)
|
||||
if not user.is_premium():
|
||||
LOG.i(
|
||||
f"User {user} has tried to create mailbox with {email} but is not premium"
|
||||
)
|
||||
raise OnlyPaidError()
|
||||
check_email_for_mailbox(email, user)
|
||||
new_mailbox: Mailbox = Mailbox.create(
|
||||
email=email, user_id=user.id, verified=verified, commit=True
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.CreateMailbox,
|
||||
message=f"Create mailbox {new_mailbox.id} ({new_mailbox.email}). Verified={verified}",
|
||||
commit=True,
|
||||
)
|
||||
|
||||
if verified:
|
||||
LOG.i(f"User {user} as created a pre-verified mailbox with {email}")
|
||||
return CreateMailboxOutput(mailbox=new_mailbox, activation=None)
|
||||
|
||||
LOG.i(f"User {user} has created mailbox with {email}")
|
||||
activation = generate_activation_code(new_mailbox, use_digit_code=use_digit_codes)
|
||||
output = CreateMailboxOutput(mailbox=new_mailbox, activation=activation)
|
||||
|
||||
if not send_email:
|
||||
LOG.i(f"Skipping sending validation email for mailbox {new_mailbox}")
|
||||
return output
|
||||
|
||||
send_verification_email(
|
||||
user,
|
||||
new_mailbox,
|
||||
activation=activation,
|
||||
send_link=send_link,
|
||||
)
|
||||
return output
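A minimal usage sketch with made-up addresses:
# create an unverified mailbox and email a numeric activation code (no link)
output = create_mailbox(
    user,
    "backup@example.com",
    use_digit_codes=True,
    send_link=False,
)
new_mailbox = output.mailbox    # Mailbox row, verified=False
activation = output.activation  # MailboxActivation with the code; None when verified=True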
|
||||
|
||||
|
||||
def check_email_for_mailbox(email, user):
|
||||
if not is_valid_email(email):
|
||||
LOG.i(
|
||||
f"User {user} has tried to create mailbox with {email} but is not valid email"
|
||||
)
|
||||
raise MailboxError("Invalid email")
|
||||
elif mailbox_already_used(email, user):
|
||||
LOG.i(
|
||||
f"User {user} has tried to create mailbox with {email} but email is already used"
|
||||
)
|
||||
raise MailboxError("Email already used")
|
||||
elif not email_can_be_used_as_mailbox(email):
|
||||
LOG.i(
|
||||
f"User {user} has tried to create mailbox with {email} but email is invalid"
|
||||
)
|
||||
raise MailboxError("Invalid email")
|
||||
|
||||
|
||||
def delete_mailbox(
|
||||
user: User,
|
||||
mailbox_id: int,
|
||||
transfer_mailbox_id: Optional[int],
|
||||
send_mail: bool = True,
|
||||
) -> Mailbox:
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
|
||||
if not mailbox or mailbox.user_id != user.id:
|
||||
LOG.i(
|
||||
f"User {user} has tried to delete another user's mailbox with {mailbox_id}"
|
||||
)
|
||||
raise MailboxError("Invalid mailbox")
|
||||
|
||||
if mailbox.id == user.default_mailbox_id:
|
||||
LOG.i(f"User {user} has tried to delete the default mailbox")
|
||||
raise MailboxError("Cannot delete your default mailbox")
|
||||
|
||||
if transfer_mailbox_id and transfer_mailbox_id > 0:
|
||||
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
|
||||
|
||||
if not transfer_mailbox or transfer_mailbox.user_id != user.id:
|
||||
LOG.i(
|
||||
f"User {user} has tried to transfer to a mailbox owned by another user"
|
||||
)
|
||||
raise MailboxError("You must transfer the aliases to a mailbox you own")
|
||||
|
||||
if transfer_mailbox.id == mailbox.id:
|
||||
LOG.i(
|
||||
f"User {user} has tried to transfer to the same mailbox he is deleting"
|
||||
)
|
||||
raise MailboxError(
|
||||
"You can not transfer the aliases to the mailbox you want to delete"
|
||||
)
|
||||
|
||||
if not transfer_mailbox.verified:
|
||||
LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
|
||||
raise MailboxError("Your new mailbox is not verified")
|
||||
|
||||
# Schedule delete account job
|
||||
LOG.i(
|
||||
f"User {user} has scheduled delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
|
||||
)
|
||||
Job.create(
|
||||
name=JobType.DELETE_MAILBOX.value,
|
||||
payload={
|
||||
"mailbox_id": mailbox.id,
|
||||
"transfer_mailbox_id": transfer_mailbox_id
|
||||
if transfer_mailbox_id and transfer_mailbox_id > 0
|
||||
else None,
|
||||
"send_mail": send_mail,
|
||||
},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
return mailbox
|
||||
|
||||
|
||||
def clear_activation_codes_for_mailbox(mailbox: Mailbox):
|
||||
Session.query(MailboxActivation).filter(
|
||||
MailboxActivation.mailbox_id == mailbox.id
|
||||
).delete()
|
||||
Session.commit()
|
||||
|
||||
|
||||
def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox:
|
||||
LOG.i(
|
||||
f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
|
||||
)
|
||||
raise MailboxError("Invalid mailbox")
|
||||
if mailbox.user_id != user.id:
|
||||
LOG.i(
|
||||
f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
|
||||
)
|
||||
raise MailboxError("Invalid mailbox")
|
||||
if mailbox.verified and not mailbox.new_email:
|
||||
LOG.i(
|
||||
f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
|
||||
)
|
||||
clear_activation_codes_for_mailbox(mailbox)
|
||||
return mailbox
|
||||
|
||||
activation = (
|
||||
MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
|
||||
.order_by(MailboxActivation.created_at.desc())
|
||||
.first()
|
||||
)
|
||||
if not activation:
|
||||
LOG.i(
|
||||
f"User {user} failed to verify mailbox {mailbox_id} because there is no activation"
|
||||
)
|
||||
raise MailboxError("Invalid code")
|
||||
if activation.tries >= MAX_ACTIVATION_TRIES:
|
||||
LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
|
||||
clear_activation_codes_for_mailbox(mailbox)
|
||||
raise CannotVerifyError(
|
||||
"Invalid activation code. Please request another code.",
|
||||
deleted_activation_code=True,
|
||||
)
|
||||
if activation.created_at < arrow.now().shift(minutes=-15):
|
||||
LOG.i(
|
||||
f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
|
||||
)
|
||||
clear_activation_codes_for_mailbox(mailbox)
|
||||
raise CannotVerifyError("Invalid activation code. Please request another code.")
|
||||
if code != activation.code:
|
||||
LOG.i(
|
||||
f"User {user} failed to verify mailbox {mailbox_id} because code does not match"
|
||||
)
|
||||
activation.tries = activation.tries + 1
|
||||
Session.commit()
|
||||
raise CannotVerifyError("Invalid activation code")
|
||||
if mailbox.new_email:
|
||||
LOG.i(
|
||||
f"User {user} has verified mailbox email change from {mailbox.email} to {mailbox.new_email}"
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
|
||||
)
|
||||
mailbox.email = mailbox.new_email
|
||||
mailbox.new_email = None
|
||||
mailbox.verified = True
|
||||
elif not mailbox.verified:
|
||||
LOG.i(f"User {user} has verified mailbox {mailbox_id}")
|
||||
mailbox.verified = True
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.VerifyMailbox,
|
||||
message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
|
||||
raise MailboxError("That address is already in use")
|
||||
|
||||
else:
|
||||
LOG.i(
|
||||
"User {user} alread has mailbox {mailbox} verified and no pending email change"
|
||||
)
|
||||
|
||||
clear_activation_codes_for_mailbox(mailbox)
|
||||
return mailbox
|
||||
|
||||
|
||||
def generate_activation_code(
|
||||
mailbox: Mailbox, use_digit_code: bool = False
|
||||
) -> MailboxActivation:
|
||||
clear_activation_codes_for_mailbox(mailbox)
|
||||
if use_digit_code:
|
||||
if config.MAILBOX_VERIFICATION_OVERRIDE_CODE:
|
||||
code = config.MAILBOX_VERIFICATION_OVERRIDE_CODE
|
||||
else:
|
||||
code = "{:06d}".format(secrets.randbelow(1000000))[:6]
|
||||
else:
|
||||
code = secrets.token_urlsafe(16)
|
||||
return MailboxActivation.create(
|
||||
mailbox_id=mailbox.id,
|
||||
code=code,
|
||||
tries=0,
|
||||
commit=True,
|
||||
)
|
||||
|
||||
|
||||
def send_verification_email(
|
||||
user: User,
|
||||
mailbox: Mailbox,
|
||||
activation: MailboxActivation,
|
||||
send_link: bool = True,
|
||||
):
|
||||
LOG.i(
|
||||
f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
|
||||
)
|
||||
|
||||
if send_link:
|
||||
verification_url = (
|
||||
config.URL
|
||||
+ "/dashboard/mailbox_verify"
|
||||
+ f"?mailbox_id={mailbox.id}&code={activation.code}"
|
||||
)
|
||||
else:
|
||||
verification_url = None
|
||||
|
||||
send_email(
|
||||
mailbox.email,
|
||||
f"Please confirm your mailbox {mailbox.email}",
|
||||
render(
|
||||
"transactional/verify-mailbox.txt.jinja2",
|
||||
user=user,
|
||||
code=activation.code,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
),
|
||||
render(
|
||||
"transactional/verify-mailbox.html",
|
||||
user=user,
|
||||
code=activation.code,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def send_change_email(user: User, mailbox: Mailbox, activation: MailboxActivation):
|
||||
verification_url = f"{config.URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox.id}&code={activation.code}"
|
||||
|
||||
send_email(
|
||||
mailbox.new_email,
|
||||
"Confirm mailbox change on SimpleLogin",
|
||||
render(
|
||||
"transactional/verify-mailbox-change.txt.jinja2",
|
||||
user=user,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
mailbox_new_email=mailbox.new_email,
|
||||
),
|
||||
render(
|
||||
"transactional/verify-mailbox-change.html",
|
||||
user=user,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
mailbox_new_email=mailbox.new_email,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def request_mailbox_email_change(
|
||||
user: User,
|
||||
mailbox: Mailbox,
|
||||
new_email: str,
|
||||
email_ownership_verified: bool = False,
|
||||
send_email: bool = True,
|
||||
use_digit_codes: bool = False,
|
||||
) -> CreateMailboxOutput:
|
||||
new_email = sanitize_email(new_email)
|
||||
if new_email == mailbox.email:
|
||||
raise MailboxError("Same email")
|
||||
check_email_for_mailbox(new_email, user)
|
||||
if email_ownership_verified:
|
||||
mailbox.email = new_email
|
||||
mailbox.new_email = None
|
||||
mailbox.verified = True
|
||||
else:
|
||||
mailbox.new_email = new_email
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Updated mailbox {mailbox.id} email ({new_email}) pre-verified({email_ownership_verified}",
|
||||
)
|
||||
try:
|
||||
Session.commit()
|
||||
except IntegrityError:
|
||||
LOG.i(f"This email {new_email} is already pending for some mailbox")
|
||||
Session.rollback()
|
||||
raise MailboxError("Email already in use")
|
||||
|
||||
if email_ownership_verified:
|
||||
LOG.i(f"User {user} as created a pre-verified mailbox with {new_email}")
|
||||
return CreateMailboxOutput(mailbox=mailbox, activation=None)
|
||||
|
||||
LOG.i(f"User {user} has updated mailbox email with {new_email}")
|
||||
activation = generate_activation_code(mailbox, use_digit_code=use_digit_codes)
|
||||
output = CreateMailboxOutput(mailbox=mailbox, activation=activation)
|
||||
|
||||
if not send_email:
|
||||
LOG.i(f"Skipping sending validation email for mailbox {mailbox}")
|
||||
return output
|
||||
|
||||
send_change_email(
|
||||
user,
|
||||
mailbox,
|
||||
activation=activation,
|
||||
)
|
||||
return output
|
||||
|
||||
|
||||
class MailboxEmailChangeError(Enum):
|
||||
InvalidId = 1
|
||||
EmailAlreadyUsed = 2
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class MailboxEmailChangeResult:
|
||||
error: Optional[MailboxEmailChangeError]
|
||||
message: str
|
||||
message_category: str
|
||||
|
||||
|
||||
def perform_mailbox_email_change(mailbox_id: int) -> MailboxEmailChangeResult:
|
||||
mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
|
||||
|
||||
# new_email can be None if user cancels change in the meantime
|
||||
if mailbox and mailbox.new_email:
|
||||
user = mailbox.user
|
||||
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
|
||||
return MailboxEmailChangeResult(
|
||||
error=MailboxEmailChangeError.EmailAlreadyUsed,
|
||||
message=f"{mailbox.new_email} is already used",
|
||||
message_category="error",
|
||||
)
|
||||
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
|
||||
)
|
||||
mailbox.email = mailbox.new_email
|
||||
mailbox.new_email = None
|
||||
|
||||
# mark mailbox as verified if the change request is sent from an unverified mailbox
|
||||
mailbox.verified = True
|
||||
Session.commit()
|
||||
|
||||
LOG.d("Mailbox change %s is verified", mailbox)
|
||||
return MailboxEmailChangeResult(
|
||||
error=None,
|
||||
message=f"The {mailbox.email} is updated",
|
||||
message_category="success",
|
||||
)
|
||||
else:
|
||||
return MailboxEmailChangeResult(
|
||||
error=MailboxEmailChangeError.InvalidId,
|
||||
message="Invalid link",
|
||||
message_category="error",
|
||||
)
|
||||
|
||||
|
||||
def cancel_email_change(mailbox_id: int, user: User):
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox:
|
||||
LOG.i(
|
||||
f"User {user} has tried to cancel a mailbox an unknown mailbox {mailbox_id}"
|
||||
)
|
||||
raise MailboxError("Invalid mailbox")
|
||||
if mailbox.user.id != user.id:
|
||||
LOG.i(
|
||||
f"User {user} has tried to cancel a mailbox {mailbox} owned by another user"
|
||||
)
|
||||
raise MailboxError("Invalid mailbox")
|
||||
mailbox.new_email = None
|
||||
LOG.i(f"User {mailbox.user} has cancelled mailbox email change")
|
||||
clear_activation_codes_for_mailbox(mailbox)
|
||||
|
||||
|
||||
def __get_alias_mailbox_from_email(
|
||||
email_address: str, alias: Alias
|
||||
) -> Optional[Mailbox]:
|
||||
for mailbox in alias.mailboxes:
|
||||
if mailbox.email == email_address:
|
||||
return mailbox
|
||||
|
||||
for authorized_address in mailbox.authorized_addresses:
|
||||
if authorized_address.email == email_address:
|
||||
LOG.d(
|
||||
"Found an authorized address for %s %s %s",
|
||||
alias,
|
||||
mailbox,
|
||||
authorized_address,
|
||||
)
|
||||
return mailbox
|
||||
return None
|
||||
|
||||
|
||||
def __get_alias_mailbox_from_email_or_canonical_email(
|
||||
email_address: str, alias: Alias
|
||||
) -> Optional[Mailbox]:
|
||||
# We need to check the non-canonicalized version first because we still have users in the db with
# non-canonicalized emails. If it matches an existing mailbox use that one, otherwise check the canonical email.
|
||||
mbox = __get_alias_mailbox_from_email(email_address, alias)
|
||||
if mbox is not None:
|
||||
return mbox
|
||||
canonical_email = canonicalize_email(email_address)
|
||||
if canonical_email != email_address:
|
||||
return __get_alias_mailbox_from_email(canonical_email, alias)
|
||||
return None
|
||||
|
||||
|
||||
def get_mailbox_for_reply_phase(
|
||||
envelope_mail_from: str, header_mail_from: str, alias
|
||||
) -> Optional[Mailbox]:
|
||||
"""return the corresponding mailbox given the mail_from and alias
|
||||
Usually mail_from == mailbox.email, but it can also be one of the authorized addresses
|
||||
"""
|
||||
mbox = __get_alias_mailbox_from_email_or_canonical_email(envelope_mail_from, alias)
|
||||
if mbox is not None:
|
||||
return mbox
|
||||
if not header_mail_from:
|
||||
return None
|
||||
envelope_from_domain = get_email_domain_part(envelope_mail_from)
|
||||
header_from_domain = get_email_domain_part(header_mail_from)
|
||||
if envelope_from_domain != header_from_domain:
|
||||
return None
|
||||
# For services that use VERP (the envelope from has encoded data to account for bounces),
# if the header-from domain matches the envelope-from domain we can use the header from
|
||||
return __get_alias_mailbox_from_email_or_canonical_email(header_mail_from, alias)
|
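For context, a minimal sketch of how the helpers in this new module fit together (the surrounding view code and the `current_user` object are assumptions for illustration, not part of this diff):

    from app.mailbox_utils import create_mailbox, verify_mailbox_code, MailboxError

    def add_mailbox_for(current_user, email: str, submitted_code: str = None):
        # create_mailbox sanitizes the address, enforces the premium check and
        # emails an activation code unless send_email=False
        output = create_mailbox(current_user, email, use_digit_codes=True)
        if submitted_code is not None:
            try:
                # verify_mailbox_code increments the try counter, enforces the
                # 3-try / 15-minute limits and clears codes on success
                return verify_mailbox_code(current_user, output.mailbox.id, submitted_code)
            except MailboxError as e:
                return e.msg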
@ -24,14 +24,15 @@ from sqlalchemy import text, desc, CheckConstraint, Index, Column
|
||||
from sqlalchemy.dialects.postgresql import TSVECTOR
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import deferred
|
||||
from sqlalchemy.orm.exc import ObjectDeletedError
|
||||
from sqlalchemy.sql import and_
|
||||
from sqlalchemy_utils import ArrowType
|
||||
|
||||
from app import config, rate_limiter
|
||||
from app import s3
|
||||
from app.constants import JobType
|
||||
from app.db import Session
|
||||
from app.dns_utils import get_mx_domains
|
||||
|
||||
from app.errors import (
|
||||
AliasInTrashError,
|
||||
DirectoryInTrashError,
|
||||
@ -157,6 +158,8 @@ class File(Base, ModelMixin):
|
||||
path = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
|
||||
|
||||
__table_args__ = (sa.Index("ix_file_user_id", "user_id"),)
|
||||
|
||||
def get_url(self, expires_in=3600):
|
||||
return s3.get_url(self.path, expires_in)
|
||||
|
||||
@ -236,6 +239,7 @@ class AuditLogActionEnum(EnumE):
|
||||
disable_user = 9
|
||||
enable_user = 10
|
||||
stop_trial = 11
|
||||
unlink_user = 12
|
||||
|
||||
|
||||
class Phase(EnumE):
|
||||
@ -263,6 +267,21 @@ class UnsubscribeBehaviourEnum(EnumE):
|
||||
PreserveOriginal = 2
|
||||
|
||||
|
||||
class AliasDeleteReason(EnumE):
|
||||
Unspecified = 0
|
||||
UserHasBeenDeleted = 1
|
||||
ManualAction = 2
|
||||
DirectoryDeleted = 3
|
||||
MailboxDeleted = 4
|
||||
CustomDomainDeleted = 5
|
||||
|
||||
|
||||
class JobPriority(EnumE):
|
||||
Low = 1
|
||||
Default = 50
|
||||
High = 100
|
||||
|
||||
|
||||
class IntEnumType(sa.types.TypeDecorator):
|
||||
impl = sa.Integer
|
||||
|
||||
@ -309,6 +328,8 @@ class HibpNotifiedAlias(Base, ModelMixin):
|
||||
|
||||
notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
|
||||
|
||||
__table_args__ = (sa.Index("ix_hibp_notified_alias_user_id", "user_id"),)
|
||||
|
||||
|
||||
class Fido(Base, ModelMixin):
|
||||
__tablename__ = "fido"
|
||||
@ -323,13 +344,16 @@ class Fido(Base, ModelMixin):
|
||||
name = sa.Column(sa.String(128), nullable=False, unique=False)
|
||||
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
|
||||
|
||||
__table_args__ = (sa.Index("ix_fido_user_id", "user_id"),)
|
||||
|
||||
|
||||
class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
__tablename__ = "users"
|
||||
|
||||
FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
|
||||
FLAG_FREE_DISABLE_CREATE_CONTACTS = 1 << 0
|
||||
FLAG_CREATED_FROM_PARTNER = 1 << 1
|
||||
FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
|
||||
FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
|
||||
|
||||
email = sa.Column(sa.String(256), unique=True, nullable=False)
|
||||
|
||||
@ -345,7 +369,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
sa.Boolean, default=True, nullable=False, server_default="1"
|
||||
)
|
||||
|
||||
activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
|
||||
activated = sa.Column(sa.Boolean, default=False, nullable=False)
|
||||
|
||||
# an account can be disabled if having harmful behavior
|
||||
disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
|
||||
@ -525,10 +549,15 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
sa.Boolean, default=True, nullable=False, server_default="1"
|
||||
)
|
||||
|
||||
# user opted in for data breach check
|
||||
enable_data_breach_check = sa.Column(
|
||||
sa.Boolean, default=False, nullable=False, server_default="0"
|
||||
)
|
||||
|
||||
# bitwise flags. Allow for future expansion
|
||||
flags = sa.Column(
|
||||
sa.BigInteger,
|
||||
default=FLAG_FREE_DISABLE_CREATE_ALIAS,
|
||||
default=FLAG_FREE_DISABLE_CREATE_CONTACTS,
|
||||
server_default="0",
|
||||
nullable=False,
|
||||
)
|
||||
@ -549,6 +578,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
|
||||
),
|
||||
sa.Index("ix_users_delete_on", delete_on),
|
||||
sa.Index("ix_users_default_mailbox_id", default_mailbox_id),
|
||||
sa.Index(
|
||||
"ix_users_default_alias_custom_domain_id", default_alias_custom_domain_id
|
||||
),
|
||||
sa.Index("ix_users_profile_picture_id", profile_picture_id),
|
||||
sa.Index(
|
||||
"idx_users_email_trgm",
|
||||
"email",
|
||||
postgresql_ops={"email": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
@ -601,14 +641,23 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
if "alternative_id" not in kwargs:
|
||||
user.alternative_id = str(uuid.uuid4())
|
||||
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
trail = ". Created from partner" if from_partner else ""
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.CreateUser,
|
||||
message=f"Created user {email}{trail}",
|
||||
)
|
||||
|
||||
# If the user is created from partner, do not notify
|
||||
# nor give a trial
|
||||
if from_partner:
|
||||
user.flags = User.FLAG_CREATED_FROM_PARTNER
|
||||
user.flags = user.flags | User.FLAG_CREATED_FROM_PARTNER
|
||||
user.notification = False
|
||||
user.trial_end = None
|
||||
Job.create(
|
||||
name=config.JOB_SEND_PROTON_WELCOME_1,
|
||||
name=JobType.SEND_PROTON_WELCOME_1.value,
|
||||
payload={"user_id": user.id},
|
||||
run_at=arrow.now(),
|
||||
)
|
||||
@ -634,17 +683,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
# Schedule onboarding emails
|
||||
Job.create(
|
||||
name=config.JOB_ONBOARDING_1,
|
||||
name=JobType.ONBOARDING_1.value,
|
||||
payload={"user_id": user.id},
|
||||
run_at=arrow.now().shift(days=1),
|
||||
)
|
||||
Job.create(
|
||||
name=config.JOB_ONBOARDING_2,
|
||||
name=JobType.ONBOARDING_2.value,
|
||||
payload={"user_id": user.id},
|
||||
run_at=arrow.now().shift(days=2),
|
||||
)
|
||||
Job.create(
|
||||
name=config.JOB_ONBOARDING_4,
|
||||
name=JobType.ONBOARDING_4.value,
|
||||
payload={"user_id": user.id},
|
||||
run_at=arrow.now().shift(days=3),
|
||||
)
|
||||
@ -652,6 +701,27 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
return user
|
||||
|
||||
@classmethod
|
||||
def delete(cls, obj_id, commit=False):
|
||||
# Internal import to avoid global import cycles
|
||||
from app.events.event_dispatcher import EventDispatcher
|
||||
from app.events.generated.event_pb2 import UserDeleted, EventContent
|
||||
|
||||
user: User = cls.get(obj_id)
|
||||
EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()))
|
||||
|
||||
# Manually delete all aliases for the user that is about to be deleted
|
||||
from app.alias_utils import delete_alias
|
||||
|
||||
for alias in Alias.filter_by(user_id=user.id):
|
||||
delete_alias(alias, user, AliasDeleteReason.UserHasBeenDeleted)
|
||||
|
||||
res = super(User, cls).delete(obj_id)
|
||||
if commit:
|
||||
Session.commit()
|
||||
|
||||
return res
|
||||
|
||||
def get_active_subscription(
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> Optional[
|
||||
@ -937,7 +1007,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
def has_custom_domain(self):
|
||||
return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0
|
||||
|
||||
def custom_domains(self):
|
||||
def custom_domains(self) -> List["CustomDomain"]:
|
||||
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
|
||||
|
||||
def available_domains_for_random_alias(
|
||||
@ -949,8 +1019,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
- the domain
|
||||
"""
|
||||
res = []
|
||||
for domain in self.available_sl_domains(alias_options=alias_options):
|
||||
res.append((True, domain))
|
||||
for domain in self.get_sl_domains(alias_options=alias_options):
|
||||
res.append((True, domain.domain))
|
||||
|
||||
for custom_domain in self.verified_custom_domains():
|
||||
res.append((False, custom_domain.domain))
|
||||
@ -1092,7 +1162,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
- Verified custom domains
|
||||
|
||||
"""
|
||||
domains = self.available_sl_domains(alias_options=alias_options)
|
||||
domains = [
|
||||
sl_domain.domain
|
||||
for sl_domain in self.get_sl_domains(alias_options=alias_options)
|
||||
]
|
||||
|
||||
for custom_domain in self.verified_custom_domains():
|
||||
domains.append(custom_domain.domain)
|
||||
@ -1129,10 +1202,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
def can_create_contacts(self) -> bool:
|
||||
if self.is_premium():
|
||||
return True
|
||||
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
|
||||
if self.flags & User.FLAG_FREE_DISABLE_CREATE_CONTACTS == 0:
|
||||
return True
|
||||
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
|
||||
|
||||
def has_used_alias_from_partner(self) -> bool:
|
||||
return (
|
||||
self.flags
|
||||
& (User.FLAG_CREATED_ALIAS_FROM_PARTNER | User.FLAG_CREATED_FROM_PARTNER)
|
||||
> 0
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<User {self.id} {self.name} {self.email}>"
|
||||
|
||||
@ -1165,6 +1245,8 @@ class ActivationCode(Base, ModelMixin):
|
||||
|
||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
||||
|
||||
__table_args__ = (sa.Index("ix_activation_code_user_id", "user_id"),)
|
||||
|
||||
def is_expired(self):
|
||||
return self.expired < arrow.now()
|
||||
|
||||
@ -1181,6 +1263,8 @@ class ResetPasswordCode(Base, ModelMixin):
|
||||
|
||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
||||
|
||||
__table_args__ = (sa.Index("ix_reset_password_code_user_id", "user_id"),)
|
||||
|
||||
def is_expired(self):
|
||||
return self.expired < arrow.now()
|
||||
|
||||
@ -1223,6 +1307,8 @@ class MfaBrowser(Base, ModelMixin):
|
||||
|
||||
user = orm.relationship(User)
|
||||
|
||||
__table_args__ = (sa.Index("ix_mfa_browser_user_id", "user_id"),)
|
||||
|
||||
@classmethod
|
||||
def create_new(cls, user, token_length=64) -> "MfaBrowser":
|
||||
found = False
|
||||
@ -1281,6 +1367,12 @@ class Client(Base, ModelMixin):
|
||||
user = orm.relationship(User)
|
||||
referral = orm.relationship("Referral")
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_client_user_id", "user_id"),
|
||||
sa.Index("ix_client_icon_id", "icon_id"),
|
||||
sa.Index("ix_client_referral_id", "referral_id"),
|
||||
)
|
||||
|
||||
def nb_user(self):
|
||||
return ClientUser.filter_by(client_id=self.id).count()
|
||||
|
||||
@ -1329,6 +1421,8 @@ class RedirectUri(Base, ModelMixin):
|
||||
|
||||
client = orm.relationship(Client, backref="redirect_uris")
|
||||
|
||||
__table_args__ = (sa.Index("ix_redirect_uri_client_id", "client_id"),)
|
||||
|
||||
|
||||
class AuthorizationCode(Base, ModelMixin):
|
||||
__tablename__ = "authorization_code"
|
||||
@ -1350,6 +1444,11 @@ class AuthorizationCode(Base, ModelMixin):
|
||||
|
||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_authorization_code_client_id", "client_id"),
|
||||
sa.Index("ix_authorization_code_user_id", "user_id"),
|
||||
)
|
||||
|
||||
def is_expired(self):
|
||||
return self.expired < arrow.now()
|
||||
|
||||
@ -1372,6 +1471,11 @@ class OauthToken(Base, ModelMixin):
|
||||
|
||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_oauth_token_user_id", "user_id"),
|
||||
sa.Index("ix_oauth_token_client_id", "client_id"),
|
||||
)
|
||||
|
||||
def is_expired(self):
|
||||
return self.expired < arrow.now()
|
||||
|
||||
@ -1422,6 +1526,9 @@ def generate_random_alias_email(
|
||||
|
||||
class Alias(Base, ModelMixin):
|
||||
__tablename__ = "alias"
|
||||
|
||||
FLAG_PARTNER_CREATED = 1 << 0
|
||||
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
@ -1431,6 +1538,9 @@ class Alias(Base, ModelMixin):
|
||||
name = sa.Column(sa.String(128), nullable=True, default=None)
|
||||
|
||||
enabled = sa.Column(sa.Boolean(), default=True, nullable=False)
|
||||
flags = sa.Column(
|
||||
sa.BigInteger(), default=0, server_default="0", nullable=False, index=True
|
||||
)
|
||||
|
||||
custom_domain_id = sa.Column(
|
||||
sa.ForeignKey("custom_domain.id", ondelete="cascade"), nullable=True, index=True
|
||||
@ -1519,6 +1629,7 @@ class Alias(Base, ModelMixin):
|
||||
postgresql_ops={"note": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
),
|
||||
Index("ix_alias_original_owner_id", "original_owner_id"),
|
||||
)
|
||||
|
||||
user = orm.relationship(User, foreign_keys=[user_id])
|
||||
@ -1561,7 +1672,7 @@ class Alias(Base, ModelMixin):
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_custom_domain(alias_address) -> Optional["CustomDomain"]:
|
||||
def get_custom_domain(alias_address: str) -> Optional["CustomDomain"]:
|
||||
alias_domain = validate_email(
|
||||
alias_address, check_deliverability=False, allow_smtputf8=False
|
||||
).domain
|
||||
@ -1604,16 +1715,44 @@ class Alias(Base, ModelMixin):
|
||||
custom_domain = Alias.get_custom_domain(email)
|
||||
if custom_domain:
|
||||
new_alias.custom_domain_id = custom_domain.id
|
||||
else:
|
||||
custom_domain = CustomDomain.get(kw["custom_domain_id"])
|
||||
# If it comes from a custom domain created from a partner, mark the alias as created from partner
|
||||
if custom_domain is not None and custom_domain.partner_id is not None:
|
||||
new_alias.flags = (new_alias.flags or 0) | Alias.FLAG_PARTNER_CREATED
|
||||
|
||||
Session.add(new_alias)
|
||||
DailyMetric.get_or_create_today_metric().nb_alias += 1
|
||||
|
||||
if (
|
||||
new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
|
||||
and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
|
||||
):
|
||||
user.flags = user.flags | User.FLAG_CREATED_ALIAS_FROM_PARTNER
|
||||
|
||||
if commit:
|
||||
Session.commit()
|
||||
|
||||
if flush:
|
||||
Session.flush()
|
||||
|
||||
# Internal import to avoid global import cycles
|
||||
from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
|
||||
from app.events.event_dispatcher import EventDispatcher
|
||||
from app.events.generated.event_pb2 import AliasCreated, EventContent
|
||||
|
||||
event = AliasCreated(
|
||||
id=new_alias.id,
|
||||
email=new_alias.email,
|
||||
note=new_alias.note,
|
||||
enabled=True,
|
||||
created_at=int(new_alias.created_at.timestamp),
|
||||
)
|
||||
EventDispatcher.send_event(user, EventContent(alias_created=event))
|
||||
emit_alias_audit_log(
|
||||
new_alias, AliasAuditLogAction.CreateAlias, "New alias created"
|
||||
)
|
||||
|
||||
return new_alias
|
||||
|
||||
@classmethod
|
||||
@ -1792,17 +1931,22 @@ class Contact(Base, ModelMixin):
|
||||
|
||||
MAX_NAME_LENGTH = 512
|
||||
|
||||
FLAG_PARTNER_CREATED = 1 << 0
|
||||
|
||||
__tablename__ = "contact"
|
||||
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint("alias_id", "website_email", name="uq_contact"),
|
||||
sa.Index("ix_contact_user_id_id", "user_id", "id"),
|
||||
)
|
||||
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
sa.ForeignKey(User.id, ondelete="cascade"),
|
||||
nullable=False,
|
||||
)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"),
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
name = sa.Column(
|
||||
@ -1850,6 +1994,9 @@ class Contact(Base, ModelMixin):
|
||||
# whether contact is created automatically during the forward phase
|
||||
automatic_created = sa.Column(sa.Boolean, nullable=True, default=False)
|
||||
|
||||
# contact flags
|
||||
flags = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
|
||||
|
||||
@property
|
||||
def email(self):
|
||||
return self.website_email
|
||||
@ -1979,11 +2126,15 @@ class Contact(Base, ModelMixin):
|
||||
|
||||
class EmailLog(Base, ModelMixin):
|
||||
__tablename__ = "email_log"
|
||||
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
|
||||
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
__table_args__ = (
|
||||
Index("ix_email_log_created_at", "created_at"),
|
||||
Index("ix_email_log_mailbox_id", "mailbox_id"),
|
||||
Index("ix_email_log_bounced_mailbox_id", "bounced_mailbox_id"),
|
||||
Index("ix_email_log_refused_email_id", "refused_email_id"),
|
||||
Index("ix_email_log_user_id_email_log_id", "user_id", "id"),
|
||||
)
|
||||
|
||||
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
|
||||
contact_id = sa.Column(
|
||||
sa.ForeignKey(Contact.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
@ -2209,6 +2360,12 @@ class DeletedAlias(Base, ModelMixin):
|
||||
__tablename__ = "deleted_alias"
|
||||
|
||||
email = sa.Column(sa.String(256), unique=True, nullable=False)
|
||||
reason = sa.Column(
|
||||
IntEnumType(AliasDeleteReason),
|
||||
nullable=False,
|
||||
default=AliasDeleteReason.Unspecified,
|
||||
server_default=str(AliasDeleteReason.Unspecified.value),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
@ -2249,10 +2406,12 @@ class AliasUsedOn(Base, ModelMixin):
|
||||
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
|
||||
sa.Index("ix_alias_used_on_user_id", "user_id"),
|
||||
)
|
||||
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"),
|
||||
nullable=False,
|
||||
)
|
||||
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
|
||||
|
||||
@ -2275,6 +2434,8 @@ class ApiKey(Base, ModelMixin):
|
||||
|
||||
user = orm.relationship(User)
|
||||
|
||||
__table_args__ = (sa.Index("ix_api_key_user_id", "user_id"),)
|
||||
|
||||
@classmethod
|
||||
def create(cls, user_id, name=None, **kwargs):
|
||||
code = random_string(60)
|
||||
@ -2342,6 +2503,18 @@ class CustomDomain(Base, ModelMixin):
|
||||
sa.Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
partner_id = sa.Column(
|
||||
sa.Integer,
|
||||
sa.ForeignKey("partner.id"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
server_default=None,
|
||||
)
|
||||
|
||||
pending_deletion = sa.Column(
|
||||
sa.Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"ix_unique_domain", # Index name
|
||||
@ -2349,6 +2522,8 @@ class CustomDomain(Base, ModelMixin):
|
||||
unique=True,
|
||||
postgresql_where=Column("ownership_verified"),
|
||||
), # The condition
|
||||
Index("ix_custom_domain_user_id", "user_id"),
|
||||
Index("ix_custom_domain_pending_deletion", "pending_deletion"),
|
||||
)
|
||||
|
||||
user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
|
||||
@ -2366,9 +2541,6 @@ class CustomDomain(Base, ModelMixin):
|
||||
def get_trash_url(self):
|
||||
return config.URL + f"/dashboard/domains/{self.id}/trash"
|
||||
|
||||
def get_ownership_dns_txt_value(self):
|
||||
return f"sl-verification={self.ownership_txt_token}"
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kwargs):
|
||||
domain = kwargs.get("domain")
|
||||
@ -2396,6 +2568,13 @@ class CustomDomain(Base, ModelMixin):
|
||||
if obj.is_sl_subdomain:
|
||||
DeletedSubdomain.create(domain=obj.domain)
|
||||
|
||||
from app import alias_utils
|
||||
|
||||
for alias in Alias.filter_by(custom_domain_id=obj_id):
|
||||
alias_utils.delete_alias(
|
||||
alias, obj.user, AliasDeleteReason.CustomDomainDeleted
|
||||
)
|
||||
|
||||
return super(CustomDomain, cls).delete(obj_id)
|
||||
|
||||
@property
|
||||
@ -2403,7 +2582,7 @@ class CustomDomain(Base, ModelMixin):
|
||||
return sorted(self._auto_create_rules, key=lambda rule: rule.order)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Custom Domain {self.domain}>"
|
||||
return f"<Custom Domain {self.id} {self.domain}>"
|
||||
|
||||
|
||||
class AutoCreateRule(Base, ModelMixin):
|
||||
@ -2458,6 +2637,7 @@ class DomainDeletedAlias(Base, ModelMixin):
|
||||
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"),
|
||||
sa.Index("ix_domain_deleted_alias_user_id", "user_id"),
|
||||
)
|
||||
|
||||
email = sa.Column(sa.String(256), nullable=False)
|
||||
@ -2468,6 +2648,12 @@ class DomainDeletedAlias(Base, ModelMixin):
|
||||
|
||||
domain = orm.relationship(CustomDomain)
|
||||
user = orm.relationship(User, foreign_keys=[user_id])
|
||||
reason = sa.Column(
|
||||
IntEnumType(AliasDeleteReason),
|
||||
nullable=False,
|
||||
default=AliasDeleteReason.Unspecified,
|
||||
server_default=str(AliasDeleteReason.Unspecified.value),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
@ -2512,6 +2698,8 @@ class Coupon(Base, ModelMixin):
|
||||
# a coupon can have an expiration
|
||||
expires_date = sa.Column(ArrowType, nullable=True)
|
||||
|
||||
__table_args__ = (sa.Index("ix_coupon_used_by_user_id", "used_by_user_id"),)
|
||||
|
||||
|
||||
class Directory(Base, ModelMixin):
|
||||
__tablename__ = "directory"
|
||||
@ -2526,6 +2714,8 @@ class Directory(Base, ModelMixin):
|
||||
"Mailbox", secondary="directory_mailbox", lazy="joined"
|
||||
)
|
||||
|
||||
__table_args__ = (sa.Index("ix_directory_user_id", "user_id"),)
|
||||
|
||||
@property
|
||||
def mailboxes(self):
|
||||
if self._mailboxes:
|
||||
@ -2559,7 +2749,7 @@ class Directory(Base, ModelMixin):
|
||||
for alias in Alias.filter_by(directory_id=obj_id):
|
||||
from app import alias_utils
|
||||
|
||||
alias_utils.delete_alias(alias, user)
|
||||
alias_utils.delete_alias(alias, user, AliasDeleteReason.DirectoryDeleted)
|
||||
|
||||
DeletedDirectory.create(name=obj.name)
|
||||
cls.filter(cls.id == obj_id).delete()
|
||||
@ -2589,6 +2779,16 @@ class Job(Base, ModelMixin):
|
||||
)
|
||||
attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0)
|
||||
taken_at = sa.Column(ArrowType, nullable=True)
|
||||
priority = sa.Column(
|
||||
IntEnumType(JobPriority),
|
||||
default=JobPriority.Default,
|
||||
server_default=str(JobPriority.Default.value),
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_state_run_at_taken_at_priority", state, run_at, taken_at, priority),
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Job {self.id} {self.name} {self.payload}>"
|
||||
@ -2596,9 +2796,7 @@ class Job(Base, ModelMixin):
|
||||
|
||||
class Mailbox(Base, ModelMixin):
|
||||
__tablename__ = "mailbox"
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
|
||||
email = sa.Column(sa.String(256), nullable=False, index=True)
|
||||
verified = sa.Column(sa.Boolean, default=False, nullable=False)
|
||||
force_spf = sa.Column(sa.Boolean, default=True, server_default="1", nullable=False)
|
||||
@ -2624,7 +2822,17 @@ class Mailbox(Base, ModelMixin):
|
||||
|
||||
generic_subject = sa.Column(sa.String(78), nullable=True)
|
||||
|
||||
__table_args__ = (sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),)
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),
|
||||
sa.Index("ix_mailbox_pgp_finger_print", "pgp_finger_print"),
|
||||
# index on email column using pg_trgm
|
||||
Index(
|
||||
"ix_mailbox_email_trgm_idx",
|
||||
"email",
|
||||
postgresql_ops={"email": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
),
|
||||
)
|
||||
|
||||
user = orm.relationship(User, foreign_keys=[user_id])
|
||||
|
||||
@ -2635,30 +2843,31 @@ class Mailbox(Base, ModelMixin):
|
||||
return False
|
||||
|
||||
def nb_alias(self):
|
||||
return (
|
||||
AliasMailbox.filter_by(mailbox_id=self.id).count()
|
||||
+ Alias.filter_by(mailbox_id=self.id).count()
|
||||
alias_ids = set(
|
||||
am.alias_id
|
||||
for am in AliasMailbox.filter_by(mailbox_id=self.id).values(
|
||||
AliasMailbox.alias_id
|
||||
)
|
||||
)
|
||||
for alias in Alias.filter_by(mailbox_id=self.id).values(Alias.id):
|
||||
alias_ids.add(alias.id)
|
||||
return len(alias_ids)
|
||||
|
||||
def is_proton(self) -> bool:
|
||||
if (
|
||||
self.email.endswith("@proton.me")
|
||||
or self.email.endswith("@protonmail.com")
|
||||
or self.email.endswith("@protonmail.ch")
|
||||
or self.email.endswith("@proton.ch")
|
||||
or self.email.endswith("@pm.me")
|
||||
):
|
||||
return True
|
||||
for proton_email_domain in config.PROTON_EMAIL_DOMAINS:
|
||||
if self.email.endswith(f"@{proton_email_domain}"):
|
||||
return True
|
||||
|
||||
from app.email_utils import get_email_local_part
|
||||
|
||||
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
|
||||
mx_domains = get_mx_domains(get_email_local_part(self.email))
|
||||
|
||||
proton_mx_domains = config.PROTON_MX_SERVERS
|
||||
# Proton is the first domain
|
||||
if mx_domains and mx_domains[0][1] in (
|
||||
"mail.protonmail.ch.",
|
||||
"mailsec.protonmail.ch.",
|
||||
):
|
||||
return True
|
||||
for prio in mx_domains:
|
||||
for mx_domain in mx_domains[prio]:
|
||||
if mx_domain in proton_mx_domains:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@ -2679,7 +2888,7 @@ class Mailbox(Base, ModelMixin):
|
||||
from app import alias_utils
|
||||
|
||||
# only put aliases that use this mailbox as their only mailbox into the trash
|
||||
alias_utils.delete_alias(alias, user)
|
||||
alias_utils.delete_alias(alias, user, AliasDeleteReason.MailboxDeleted)
|
||||
Session.commit()
|
||||
|
||||
cls.filter(cls.id == obj_id).delete()
|
||||
@ -2687,12 +2896,15 @@ class Mailbox(Base, ModelMixin):
|
||||
|
||||
@property
|
||||
def aliases(self) -> [Alias]:
|
||||
ret = Alias.filter_by(mailbox_id=self.id).all()
|
||||
ret = dict(
|
||||
(alias.id, alias) for alias in Alias.filter_by(mailbox_id=self.id).all()
|
||||
)
|
||||
|
||||
for am in AliasMailbox.filter_by(mailbox_id=self.id):
|
||||
ret.append(am.alias)
|
||||
if am.alias_id not in ret:
|
||||
ret[am.alias_id] = am.alias
|
||||
|
||||
return ret
|
||||
return list(ret.values())
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
@ -2704,6 +2916,16 @@ class Mailbox(Base, ModelMixin):
|
||||
return f"<Mailbox {self.id} {self.email}>"
|
||||
|
||||
|
||||
class MailboxActivation(Base, ModelMixin):
|
||||
__tablename__ = "mailbox_activation"
|
||||
|
||||
mailbox_id = sa.Column(
|
||||
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
code = sa.Column(sa.String(32), nullable=False, index=True)
|
||||
tries = sa.Column(sa.Integer, default=0, nullable=False)
|
||||
|
||||
|
||||
class AccountActivation(Base, ModelMixin):
|
||||
"""contains code to activate the user account when they sign up on mobile"""
|
||||
|
||||
@ -2743,6 +2965,8 @@ class RefusedEmail(Base, ModelMixin):
|
||||
# toggle this when email content (stored at full_report_path & path are deleted)
|
||||
deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
__table_args__ = (sa.Index("ix_refused_email_user_id", "user_id"),)
|
||||
|
||||
def get_url(self, expires_in=3600):
|
||||
if self.path:
|
||||
return s3.get_url(self.path, expires_in)
|
||||
@ -2765,6 +2989,8 @@ class Referral(Base, ModelMixin):
|
||||
|
||||
user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")
|
||||
|
||||
__table_args__ = (sa.Index("ix_referral_user_id", "user_id"),)
|
||||
|
||||
@property
|
||||
def nb_user(self) -> int:
|
||||
return User.filter_by(referral_id=self.id, activated=True).count()
|
||||
@ -2804,6 +3030,12 @@ class SentAlert(Base, ModelMixin):
|
||||
to_email = sa.Column(sa.String(256), nullable=False)
|
||||
alert_type = sa.Column(sa.String(256), nullable=False)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_sent_alert_user_id", "user_id"),
|
||||
sa.Index("ix_sent_alert_to_email", "to_email"),
|
||||
sa.Index("ix_sent_alert_alert_type", "alert_type"),
|
||||
)
|
||||
|
||||
|
||||
class AliasMailbox(Base, ModelMixin):
|
||||
__tablename__ = "alias_mailbox"
|
||||
@ -2812,7 +3044,8 @@ class AliasMailbox(Base, ModelMixin):
|
||||
)
|
||||
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"),
|
||||
nullable=False,
|
||||
)
|
||||
mailbox_id = sa.Column(
|
||||
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
|
||||
@ -2827,7 +3060,8 @@ class AliasHibp(Base, ModelMixin):
|
||||
__table_args__ = (sa.UniqueConstraint("alias_id", "hibp_id", name="uq_alias_hibp"),)
|
||||
|
||||
alias_id = sa.Column(
|
||||
sa.Integer(), sa.ForeignKey("alias.id", ondelete="cascade"), index=True
|
||||
sa.Integer(),
|
||||
sa.ForeignKey("alias.id", ondelete="cascade"),
|
||||
)
|
||||
hibp_id = sa.Column(
|
||||
sa.Integer(), sa.ForeignKey("hibp.id", ondelete="cascade"), index=True
|
||||
@ -2922,11 +3156,7 @@ class RecoveryCode(Base, ModelMixin):
|
||||
@classmethod
|
||||
def find_by_user_code(cls, user: User, code: str):
|
||||
hashed_code = cls._hash_code(code)
|
||||
# TODO: Only return hashed codes once there aren't unhashed codes in the db.
|
||||
found_code = cls.get_by(user_id=user.id, code=hashed_code)
|
||||
if found_code:
|
||||
return found_code
|
||||
return cls.get_by(user_id=user.id, code=code)
|
||||
return cls.get_by(user_id=user.id, code=hashed_code)
|
||||
|
||||
@classmethod
|
||||
def empty(cls, user):
|
||||
@ -2937,7 +3167,9 @@ class RecoveryCode(Base, ModelMixin):
|
||||
|
||||
class Notification(Base, ModelMixin):
|
||||
__tablename__ = "notification"
|
||||
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
message = sa.Column(sa.Text, nullable=False)
|
||||
title = sa.Column(sa.String(512))
|
||||
|
||||
@ -3019,7 +3251,7 @@ class SLDomain(Base, ModelMixin):
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"
|
||||
return f"<SLDomain {self.id} {self.domain} {'Premium' if self.premium_only else 'Free'}>"
|
||||
|
||||
|
||||
class Monitoring(Base, ModelMixin):
|
||||
@ -3051,6 +3283,11 @@ class BatchImport(Base, ModelMixin):
|
||||
file = orm.relationship(File)
|
||||
user = orm.relationship(User)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_batch_import_file_id", "file_id"),
|
||||
sa.Index("ix_batch_import_user_id", "user_id"),
|
||||
)
|
||||
|
||||
def nb_alias(self):
|
||||
return Alias.filter_by(batch_import_id=self.id).count()
|
||||
|
||||
@ -3071,6 +3308,7 @@ class AuthorizedAddress(Base, ModelMixin):
|
||||
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
|
||||
sa.Index("ix_authorized_address_user_id", "user_id"),
|
||||
)
|
||||
|
||||
mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
|
||||
@ -3178,6 +3416,20 @@ class TransactionalEmail(Base, ModelMixin):
|
||||
|
||||
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
# whether to call Session.commit
|
||||
commit = kw.pop("commit", False)
|
||||
|
||||
r = cls(**kw)
|
||||
if not config.STORE_TRANSACTIONAL_EMAILS:
|
||||
return r
|
||||
|
||||
Session.add(r)
|
||||
if commit:
|
||||
Session.commit()
|
||||
return r
|
||||
|
||||
|
||||
class Payout(Base, ModelMixin):
|
||||
"""Referral payouts"""
|
||||
@ -3198,6 +3450,8 @@ class Payout(Base, ModelMixin):
|
||||
|
||||
user = orm.relationship(User)
|
||||
|
||||
__table_args__ = (sa.Index("ix_payout_user_id", "user_id"),)
|
||||
|
||||
|
||||
class IgnoredEmail(Base, ModelMixin):
|
||||
"""If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
|
||||
@ -3299,6 +3553,8 @@ class PhoneReservation(Base, ModelMixin):
|
||||
start = sa.Column(ArrowType, nullable=False)
|
||||
end = sa.Column(ArrowType, nullable=False)
|
||||
|
||||
__table_args__ = (sa.Index("ix_phone_reservation_user_id", "user_id"),)
|
||||
|
||||
|
||||
class PhoneMessage(Base, ModelMixin):
|
||||
__tablename__ = "phone_message"
|
||||
@ -3375,6 +3631,7 @@ class AdminAuditLog(Base):
|
||||
action=AuditLogActionEnum.stop_trial.value,
|
||||
model="User",
|
||||
model_id=user_id,
|
||||
data={},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@ -3472,6 +3729,11 @@ class ProviderComplaint(Base, ModelMixin):
|
||||
user = orm.relationship(User, foreign_keys=[user_id])
|
||||
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_provider_complaint_user_id", "user_id"),
|
||||
sa.Index("ix_provider_complaint_refused_email_id", "refused_email_id"),
|
||||
)
|
||||
|
||||
|
||||
class PartnerApiToken(Base, ModelMixin):
|
||||
__tablename__ = "partner_api_token"
|
||||
@ -3515,7 +3777,8 @@ class PartnerUser(Base, ModelMixin):
|
||||
index=True,
|
||||
)
|
||||
partner_id = sa.Column(
|
||||
sa.ForeignKey("partner.id", ondelete="cascade"), nullable=False, index=True
|
||||
sa.ForeignKey("partner.id", ondelete="cascade"),
|
||||
nullable=False,
|
||||
)
|
||||
external_user_id = sa.Column(sa.String(128), unique=False, nullable=False)
|
||||
partner_email = sa.Column(sa.String(255), unique=False, nullable=True)
|
||||
@ -3542,7 +3805,8 @@ class PartnerSubscription(Base, ModelMixin):
|
||||
)
|
||||
|
||||
# when the partner subscription ends
|
||||
end_at = sa.Column(ArrowType, nullable=False, index=True)
|
||||
end_at = sa.Column(ArrowType, nullable=True, index=True)
|
||||
lifetime = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
|
||||
|
||||
partner_user = orm.relationship(PartnerUser)
|
||||
|
||||
@ -3564,7 +3828,9 @@ class PartnerSubscription(Base, ModelMixin):
|
||||
return None
|
||||
|
||||
def is_active(self):
|
||||
return self.end_at > arrow.now().shift(days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS)
|
||||
return self.lifetime or self.end_at > arrow.now().shift(
|
||||
days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS
|
||||
)
|
||||
|
||||
|
||||
# endregion
|
||||
@ -3595,6 +3861,8 @@ class NewsletterUser(Base, ModelMixin):
|
||||
user = orm.relationship(User)
|
||||
newsletter = orm.relationship(Newsletter)
|
||||
|
||||
__table_args__ = (sa.Index("ix_newsletter_user_user_id", "user_id"),)
|
||||
|
||||
|
||||
class ApiToCookieToken(Base, ModelMixin):
|
||||
__tablename__ = "api_cookie_token"
|
||||
@ -3605,8 +3873,102 @@ class ApiToCookieToken(Base, ModelMixin):
|
||||
user = orm.relationship(User)
|
||||
api_key = orm.relationship(ApiKey)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_api_to_cookie_token_api_key_id", "api_key_id"),
|
||||
sa.Index("ix_api_to_cookie_token_user_id", "user_id"),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kwargs):
|
||||
code = secrets.token_urlsafe(32)
|
||||
|
||||
return super().create(code=code, **kwargs)
|
||||
|
||||
|
||||
class SyncEvent(Base, ModelMixin):
|
||||
"""This model holds the events that need to be sent to the webhook"""
|
||||
|
||||
__tablename__ = "sync_event"
|
||||
content = sa.Column(sa.LargeBinary, unique=False, nullable=False)
|
||||
taken_time = sa.Column(
|
||||
ArrowType, default=None, nullable=True, server_default=None, index=True
|
||||
)
|
||||
retry_count = sa.Column(sa.Integer, default=0, nullable=False, server_default="0")
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_sync_event_created_at", "created_at"),
|
||||
sa.Index("ix_sync_event_taken_time", "taken_time"),
|
||||
)
|
||||
|
||||
def mark_as_taken(self, allow_taken_older_than: Optional[Arrow] = None) -> bool:
|
||||
try:
|
||||
taken_condition = ["taken_time IS NULL"]
|
||||
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
|
||||
if allow_taken_older_than:
|
||||
taken_condition.append("taken_time < :taken_older_than")
|
||||
args["taken_older_than"] = allow_taken_older_than.datetime
|
||||
sql_taken_condition = "({})".format(" OR ".join(taken_condition))
|
||||
sql = f"UPDATE sync_event SET taken_time = :taken_time WHERE id = :sync_event_id AND {sql_taken_condition}"
|
||||
res = Session.execute(sql, args)
|
||||
Session.commit()
|
||||
except ObjectDeletedError:
|
||||
return False
|
||||
|
||||
return res.rowcount > 0
|
||||
|
||||
@classmethod
|
||||
def get_dead_letter(cls, older_than: Arrow, max_retries: int) -> [SyncEvent]:
|
||||
return (
|
||||
SyncEvent.filter(
|
||||
(
|
||||
(
|
||||
SyncEvent.taken_time.isnot(None)
|
||||
& (SyncEvent.taken_time < older_than)
|
||||
)
|
||||
| (
|
||||
SyncEvent.taken_time.is_(None)
|
||||
& (SyncEvent.created_at < older_than)
|
||||
)
|
||||
)
|
||||
& (SyncEvent.retry_count < max_retries)
|
||||
)
|
||||
.order_by(SyncEvent.id)
|
||||
.limit(100)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
class AliasAuditLog(Base, ModelMixin):
|
||||
"""This model holds an audit log for all the actions performed to an alias"""
|
||||
|
||||
__tablename__ = "alias_audit_log"
|
||||
|
||||
user_id = sa.Column(sa.Integer, nullable=False)
|
||||
alias_id = sa.Column(sa.Integer, nullable=False)
|
||||
alias_email = sa.Column(sa.String(255), nullable=False)
|
||||
action = sa.Column(sa.String(255), nullable=False)
|
||||
message = sa.Column(sa.Text, default=None, nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_alias_audit_log_user_id", "user_id"),
|
||||
sa.Index("ix_alias_audit_log_alias_id", "alias_id"),
|
||||
sa.Index("ix_alias_audit_log_alias_email", "alias_email"),
|
||||
sa.Index("ix_alias_audit_log_created_at", "created_at"),
|
||||
)
|
||||
|
||||
|
||||
class UserAuditLog(Base, ModelMixin):
|
||||
"""This model holds an audit log for all the actions performed by a user"""
|
||||
|
||||
__tablename__ = "user_audit_log"
|
||||
|
||||
user_id = sa.Column(sa.Integer, nullable=False)
|
||||
user_email = sa.Column(sa.String(255), nullable=False)
|
||||
action = sa.Column(sa.String(255), nullable=False)
|
||||
message = sa.Column(sa.Text, default=None, nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("ix_user_audit_log_user_id", "user_id"),
|
||||
sa.Index("ix_user_audit_log_user_email", "user_email"),
|
||||
sa.Index("ix_user_audit_log_created_at", "created_at"),
|
||||
)
|
||||
|
@ -1,4 +1,4 @@
from app.build_info import SHA1
from app.build_info import SHA1, VERSION
from app.monitor.base import monitor_bp


@ -7,6 +7,11 @@ def git_sha1():
    return SHA1


@monitor_bp.route("/version")
def version():
    return VERSION


@monitor_bp.route("/live")
def live():
    return "live"

8 app/app/monitor_utils.py Normal file
@ -0,0 +1,8 @@
from app.build_info import VERSION
import newrelic.agent


def send_version_event(service: str):
    newrelic.agent.record_custom_event(
        "ServiceVersion", {"service": service, "version": VERSION}
    )
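As a usage note, a minimal sketch of how a service entrypoint could call this helper on startup (the "email_handler" service name is only an illustrative assumption, not taken from this diff):

    # hypothetical startup hook for a long-running service
    from app.monitor_utils import send_version_event

    def main():
        # record the running version as a New Relic custom event before entering the main loop
        send_version_event("email_handler")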
@ -20,7 +20,7 @@ def final():
    if form.validate_on_submit():
        alias = Alias.get_by(email=form.email.data)
        if alias and alias.user_id == current_user.id:
            send_test_email_alias(alias.email, current_user.name)
            send_test_email_alias(current_user, alias.email)
            flash("An email is sent to your alias", "success")

    return render_template(
@ -1,7 +1,13 @@
from app.onboarding.base import onboarding_bp
from flask import render_template
from flask import render_template, url_for, redirect


@onboarding_bp.route("/", methods=["GET"])
def index():
    return render_template("onboarding/index.html")
    # Do the redirect to ensure cookies are set because they are SameSite=lax/strict
    return redirect(url_for("onboarding.setup"))


@onboarding_bp.route("/setup", methods=["GET"])
def setup():
    return render_template("onboarding/setup.html")
@ -27,6 +27,7 @@ def failed_payment(sub: Subscription, subscription_id: str):
        "SimpleLogin - your subscription has failed to be renewed",
        render(
            "transactional/subscription-cancel.txt",
            user=user,
            end_date=arrow.arrow.datetime.utcnow(),
        ),
    )

55 app/app/partner_user_utils.py Normal file
@ -0,0 +1,55 @@
|
||||
from typing import Optional
|
||||
|
||||
import arrow
|
||||
from arrow import Arrow
|
||||
|
||||
from app.constants import JobType
|
||||
from app.models import PartnerUser, PartnerSubscription, User, Job
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
|
||||
def create_partner_user(
|
||||
user: User, partner_id: int, partner_email: str, external_user_id: str
|
||||
) -> PartnerUser:
|
||||
instance = PartnerUser.create(
|
||||
user_id=user.id,
|
||||
partner_id=partner_id,
|
||||
partner_email=partner_email,
|
||||
external_user_id=external_user_id,
|
||||
)
|
||||
Job.create(
|
||||
name=JobType.SEND_ALIAS_CREATION_EVENTS.value,
|
||||
payload={"user_id": user.id},
|
||||
run_at=arrow.now(),
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.LinkAccount,
|
||||
message=f"Linked account to partner_id={partner_id} | partner_email={partner_email} | external_user_id={external_user_id}",
|
||||
)
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
def create_partner_subscription(
|
||||
partner_user: PartnerUser,
|
||||
expiration: Optional[Arrow] = None,
|
||||
lifetime: bool = False,
|
||||
msg: Optional[str] = None,
|
||||
) -> PartnerSubscription:
|
||||
instance = PartnerSubscription.create(
|
||||
partner_user_id=partner_user.id,
|
||||
end_at=expiration,
|
||||
lifetime=lifetime,
|
||||
)
|
||||
|
||||
message = "User upgraded through partner subscription"
|
||||
if msg:
|
||||
message += f" | {msg}"
|
||||
emit_user_audit_log(
|
||||
user=partner_user.user,
|
||||
action=UserAuditLogAction.Upgrade,
|
||||
message=message,
|
||||
)
|
||||
|
||||
return instance
|
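Usage sketch (not part of this diff): the two helpers above are meant to be called from partner-login handling code. The user object, partner id, email and external id below are placeholders; in the real flow they come from the partner OAuth exchange.

# Illustrative wiring of create_partner_user/create_partner_subscription.
import arrow

from app.models import User
from app.partner_user_utils import create_partner_subscription, create_partner_user


def link_and_upgrade(user: User, partner_id: int) -> None:
    partner_user = create_partner_user(
        user=user,
        partner_id=partner_id,
        partner_email="person@example.com",  # placeholder
        external_user_id="external-id-123",  # placeholder
    )
    # Grant a one-year partner subscription; the helper records the audit log.
    create_partner_subscription(
        partner_user=partner_user,
        expiration=arrow.now().shift(years=1),
        msg="Illustrative subscription created from a usage sketch",
    )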
0
app/app/payments/__init__.py
Normal file
121
app/app/payments/coinbase.py
Normal file
@ -0,0 +1,121 @@
from typing import Optional

import arrow

from coinbase_commerce.error import WebhookInvalidPayload, SignatureVerificationError
from coinbase_commerce.webhook import Webhook
from flask import Flask, request

from app.config import COINBASE_WEBHOOK_SECRET
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import CoinbaseSubscription, User
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def setup_coinbase_commerce(app: Flask):
    @app.route("/coinbase", methods=["POST"])
    def coinbase_webhook():
        # event payload
        request_data = request.data.decode("utf-8")
        # webhook signature
        request_sig = request.headers.get("X-CC-Webhook-Signature", None)

        try:
            # signature verification and event object construction
            event = Webhook.construct_event(
                request_data, request_sig, COINBASE_WEBHOOK_SECRET
            )
        except (WebhookInvalidPayload, SignatureVerificationError) as e:
            LOG.e("Invalid Coinbase webhook")
            return str(e), 400

        LOG.d("Coinbase event %s", event)

        if event["type"] == "charge:confirmed":
            if handle_coinbase_event(event):
                return "success", 200
            else:
                return "error", 400

        return "success", 200


def handle_coinbase_event(event) -> bool:
    server_user_id = event["data"]["metadata"]["user_id"]
    try:
        user_id = int(server_user_id)
    except ValueError:
        user_id = int(float(server_user_id))

    code = event["data"]["code"]
    user: Optional[User] = User.get(user_id)
    if not user:
        LOG.e("User not found %s", user_id)
        return False

    coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
        user_id=user_id
    )

    if not coinbase_subscription:
        LOG.d("Create a coinbase subscription for %s", user)
        coinbase_subscription = CoinbaseSubscription.create(
            user_id=user_id, end_at=arrow.now().shift(years=1), code=code, commit=True
        )
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.Upgrade,
            message="Upgraded though Coinbase",
            commit=True,
        )
        send_email(
            user.email,
            "Your SimpleLogin account has been upgraded",
            render(
                "transactional/coinbase/new-subscription.txt",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
            render(
                "transactional/coinbase/new-subscription.html",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
        )
    else:
        if coinbase_subscription.code != code:
            LOG.d("Update code from %s to %s", coinbase_subscription.code, code)
            coinbase_subscription.code = code

        if coinbase_subscription.is_active():
            coinbase_subscription.end_at = coinbase_subscription.end_at.shift(years=1)
        else:  # already expired subscription
            coinbase_subscription.end_at = arrow.now().shift(years=1)

        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.SubscriptionExtended,
            message="Extended coinbase subscription",
        )
        Session.commit()

        send_email(
            user.email,
            "Your SimpleLogin account has been extended",
            render(
                "transactional/coinbase/extend-subscription.txt",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
            render(
                "transactional/coinbase/extend-subscription.html",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
        )
    execute_subscription_webhook(user)

    return True
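For reference (not part of this diff), handle_coinbase_event only reads a few fields from the Coinbase Commerce event. A minimal sketch of the shape it expects, with made-up values; a real event carries many more fields and must first pass Webhook.construct_event:

# Illustrative payload shape only; not a real Coinbase Commerce event.
fake_event = {
    "type": "charge:confirmed",
    "data": {
        "code": "ABCD1234",  # charge code stored on the CoinbaseSubscription
        "metadata": {"user_id": "42"},  # set when the charge was created
    },
}

# handle_coinbase_event(fake_event) would look up User 42 and create or
# extend their CoinbaseSubscription by one year.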
286
app/app/payments/paddle.py
Normal file
@ -0,0 +1,286 @@
import arrow
import json
from dateutil.relativedelta import relativedelta


from flask import Flask, request

from app import paddle_utils, paddle_callback
from app.config import (
    PADDLE_MONTHLY_PRODUCT_ID,
    PADDLE_MONTHLY_PRODUCT_IDS,
    PADDLE_YEARLY_PRODUCT_IDS,
    PADDLE_COUPON_ID,
)
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import Subscription, PlanEnum, User, Coupon
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string


def setup_paddle_callback(app: Flask):
    @app.route("/paddle", methods=["GET", "POST"])
    def paddle():
        LOG.d(f"paddle callback {request.form.get('alert_name')} {request.form}")

        # make sure the request comes from Paddle
        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
            return "KO", 400

        if (
            request.form.get("alert_name") == "subscription_created"
        ):  # new user subscribes
            # the passthrough is json encoded, e.g.
            # request.form.get("passthrough") = '{"user_id": 88 }'
            passthrough = json.loads(request.form.get("passthrough"))
            user_id = passthrough.get("user_id")
            user = User.get(user_id)

            subscription_plan_id = int(request.form.get("subscription_plan_id"))

            if subscription_plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
                plan = PlanEnum.monthly
            elif subscription_plan_id in PADDLE_YEARLY_PRODUCT_IDS:
                plan = PlanEnum.yearly
            else:
                LOG.e(
                    "Unknown subscription_plan_id %s %s",
                    subscription_plan_id,
                    request.form,
                )
                return "No such subscription", 400

            sub = Subscription.get_by(user_id=user.id)

            if not sub:
                LOG.d(f"create a new Subscription for user {user}")
                Subscription.create(
                    user_id=user.id,
                    cancel_url=request.form.get("cancel_url"),
                    update_url=request.form.get("update_url"),
                    subscription_id=request.form.get("subscription_id"),
                    event_time=arrow.now(),
                    next_bill_date=arrow.get(
                        request.form.get("next_bill_date"), "YYYY-MM-DD"
                    ).date(),
                    plan=plan,
                )
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.Upgrade,
                    message="Upgraded through Paddle",
                )
            else:
                LOG.d(f"Update an existing Subscription for user {user}")
                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.subscription_id = request.form.get("subscription_id")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()
                sub.plan = plan

                # make sure to set the new plan as not-cancelled
                # in case user cancels a plan and subscribes a new plan
                sub.cancelled = False
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended Paddle subscription",
                )

            execute_subscription_webhook(user)
            LOG.d("User %s upgrades!", user)

            Session.commit()

        elif request.form.get("alert_name") == "subscription_payment_succeeded":
            subscription_id = request.form.get("subscription_id")
            LOG.d("Update subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            # when user subscribes, the "subscription_payment_succeeded" can arrive BEFORE "subscription_created"
            # at that time, subscription object does not exist yet
            if sub:
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()

                Session.commit()
                execute_subscription_webhook(sub.user)

        elif request.form.get("alert_name") == "subscription_cancelled":
            subscription_id = request.form.get("subscription_id")

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            if sub:
                # cancellation_effective_date should be the same as next_bill_date
                LOG.w(
                    "Cancel subscription %s %s on %s, next bill date %s",
                    subscription_id,
                    sub.user,
                    request.form.get("cancellation_effective_date"),
                    sub.next_bill_date,
                )
                sub.event_time = arrow.now()

                sub.cancelled = True
                emit_user_audit_log(
                    user=sub.user,
                    action=UserAuditLogAction.SubscriptionCancelled,
                    message="Cancelled Paddle subscription",
                )
                Session.commit()

                user = sub.user

                send_email(
                    user.email,
                    "SimpleLogin - your subscription is canceled",
                    render(
                        "transactional/subscription-cancel.txt",
                        user=user,
                        end_date=request.form.get("cancellation_effective_date"),
                    ),
                )
                execute_subscription_webhook(sub.user)

            else:
                # user might have deleted their account
                LOG.i(f"Cancel non-exist subscription {subscription_id}")
                return "OK"
        elif request.form.get("alert_name") == "subscription_updated":
            subscription_id = request.form.get("subscription_id")

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            if sub:
                next_bill_date = request.form.get("next_bill_date")
                if not next_bill_date:
                    paddle_callback.failed_payment(sub, subscription_id)
                    return "OK"

                LOG.d(
                    "Update subscription %s %s on %s, next bill date %s",
                    subscription_id,
                    sub.user,
                    request.form.get("cancellation_effective_date"),
                    sub.next_bill_date,
                )
                if (
                    int(request.form.get("subscription_plan_id"))
                    == PADDLE_MONTHLY_PRODUCT_ID
                ):
                    plan = PlanEnum.monthly
                else:
                    plan = PlanEnum.yearly

                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()
                sub.plan = plan

                # make sure to set the new plan as not-cancelled
                sub.cancelled = False
                emit_user_audit_log(
                    user=sub.user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended Paddle subscription",
                )

                Session.commit()
                execute_subscription_webhook(sub.user)
            else:
                LOG.w(
                    f"update non-exist subscription {subscription_id}. {request.form}"
                )
                return "No such subscription", 400
        elif request.form.get("alert_name") == "payment_refunded":
            subscription_id = request.form.get("subscription_id")
            LOG.d("Refund request for subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)

            if sub:
                user = sub.user
                Subscription.delete(sub.id)
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.SubscriptionCancelled,
                    message="Paddle subscription cancelled as user requested a refund",
                )
                Session.commit()
                LOG.e("%s requests a refund", user)
                execute_subscription_webhook(sub.user)

        elif request.form.get("alert_name") == "subscription_payment_refunded":
            subscription_id = request.form.get("subscription_id")
            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            LOG.d(
                "Handle subscription_payment_refunded for subscription %s",
                subscription_id,
            )

            if not sub:
                LOG.w(
                    "No such subscription for %s, payload %s",
                    subscription_id,
                    request.form,
                )
                return "No such subscription"

            plan_id = int(request.form["subscription_plan_id"])
            if request.form["refund_type"] == "full":
                if plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
                    LOG.d("subtract 1 month from next_bill_date %s", sub.next_bill_date)
                    sub.next_bill_date = sub.next_bill_date - relativedelta(months=1)
                    LOG.d("next_bill_date is %s", sub.next_bill_date)
                    Session.commit()
                elif plan_id in PADDLE_YEARLY_PRODUCT_IDS:
                    LOG.d("subtract 1 year from next_bill_date %s", sub.next_bill_date)
                    sub.next_bill_date = sub.next_bill_date - relativedelta(years=1)
                    LOG.d("next_bill_date is %s", sub.next_bill_date)
                    Session.commit()
                else:
                    LOG.e("Unknown plan_id %s", plan_id)
            else:
                LOG.w("partial subscription_payment_refunded, not handled")
            execute_subscription_webhook(sub.user)

        return "OK"

    @app.route("/paddle_coupon", methods=["GET", "POST"])
    def paddle_coupon():
        LOG.d("paddle coupon callback %s", request.form)

        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
            return "KO", 400

        product_id = request.form.get("p_product_id")
        if product_id != PADDLE_COUPON_ID:
            LOG.e("product_id %s not match with %s", product_id, PADDLE_COUPON_ID)
            return "KO", 400

        email = request.form.get("email")
        LOG.d("Paddle coupon request for %s", email)

        coupon = Coupon.create(
            code=random_string(30),
            comment="For 1-year coupon",
            expires_date=arrow.now().shift(years=1, days=-1),
            commit=True,
        )

        return (
            f"Your 1-year coupon is <b>{coupon.code}</b> <br> "
            f"It's valid until <b>{coupon.expires_date.date().isoformat()}</b>"
        )
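Usage sketch (not part of this diff): both payment modules expose a setup function that mounts their webhook routes on the Flask app at startup. The factory name below is illustrative; in the project the registration happens in the existing app factory.

# Illustrative wiring only.
from flask import Flask

from app.payments.coinbase import setup_coinbase_commerce
from app.payments.paddle import setup_paddle_callback


def create_payment_app() -> Flask:
    app = Flask(__name__)
    setup_paddle_callback(app)  # mounts /paddle and /paddle_coupon
    setup_coinbase_commerce(app)  # mounts /coinbase
    return app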
@ -16,6 +16,7 @@ PROTON_ERROR_CODE_HV_NEEDED = 9001

 PLAN_FREE = 1
 PLAN_PREMIUM = 2
+PLAN_PREMIUM_LIFETIME = 3


 @dataclass
@ -112,10 +113,13 @@ class HttpProtonClient(ProtonClient):
         if plan_value == PLAN_FREE:
             plan = SLPlan(type=SLPlanType.Free, expiration=None)
         elif plan_value == PLAN_PREMIUM:
+            expiration = info.get("PlanExpiration", "1")
             plan = SLPlan(
                 type=SLPlanType.Premium,
-                expiration=Arrow.fromtimestamp(info["PlanExpiration"], tzinfo="utc"),
+                expiration=Arrow.fromtimestamp(expiration, tzinfo="utc"),
             )
+        elif plan_value == PLAN_PREMIUM_LIFETIME:
+            plan = SLPlan(SLPlanType.PremiumLifetime, expiration=None)
         else:
             raise Exception(f"Invalid value for plan: {plan_value}")

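The hunk above extends the Proton plan mapping with a lifetime tier and tolerates a missing PlanExpiration. A self-contained sketch of the same mapping logic, using toy types instead of the project's SLPlan/SLPlanType and a simplified way of reading the plan value:

# Standalone illustration; types, field access and defaults are assumptions.
from dataclasses import dataclass
from enum import Enum
from typing import Optional

from arrow import Arrow

PLAN_FREE = 1
PLAN_PREMIUM = 2
PLAN_PREMIUM_LIFETIME = 3


class PlanType(Enum):
    Free = 1
    Premium = 2
    PremiumLifetime = 3


@dataclass
class Plan:
    type: PlanType
    expiration: Optional[Arrow]


def plan_from_proton_info(info: dict) -> Plan:
    # How plan_value is extracted from the Proton response is simplified here.
    plan_value = info.get("Plan", PLAN_FREE)
    if plan_value == PLAN_FREE:
        return Plan(PlanType.Free, expiration=None)
    elif plan_value == PLAN_PREMIUM:
        # Fall back to "1" (one second after the epoch) when PlanExpiration is
        # missing, mirroring the hunk above.
        expiration = info.get("PlanExpiration", "1")
        return Plan(PlanType.Premium, Arrow.fromtimestamp(expiration, tzinfo="utc"))
    elif plan_value == PLAN_PREMIUM_LIFETIME:
        return Plan(PlanType.PremiumLifetime, expiration=None)
    raise ValueError(f"Invalid value for plan: {plan_value}")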
@ -1,9 +1,8 @@
-from newrelic import agent
 from typing import Optional

 from app.db import Session
 from app.errors import ProtonPartnerNotSetUp
-from app.models import Partner, PartnerUser, User
+from app.models import Partner

 PROTON_PARTNER_NAME = "Proton"
 _PROTON_PARTNER: Optional[Partner] = None
@ -22,14 +21,3 @@ def get_proton_partner() -> Partner:

 def is_proton_partner(partner: Partner) -> bool:
     return partner.name == PROTON_PARTNER_NAME
-
-
-def perform_proton_account_unlink(current_user: User):
-    proton_partner = get_proton_partner()
-    partner_user = PartnerUser.get_by(
-        user_id=current_user.id, partner_id=proton_partner.id
-    )
-    if partner_user is not None:
-        PartnerUser.delete(partner_user.id)
-    Session.commit()
-    agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
39
app/app/proton/proton_unlink.py
Normal file
@ -0,0 +1,39 @@
from newrelic import agent

from app.db import Session
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserUnlinked
from app.log import LOG
from app.models import User, PartnerUser
from app.proton.proton_partner import get_proton_partner
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def can_unlink_proton_account(user: User) -> bool:
    return (user.flags & User.FLAG_CREATED_FROM_PARTNER) == 0


def perform_proton_account_unlink(
    current_user: User, skip_check: bool = False
) -> None | str:
    if not skip_check and not can_unlink_proton_account(current_user):
        return None
    proton_partner = get_proton_partner()
    partner_user = PartnerUser.get_by(
        user_id=current_user.id, partner_id=proton_partner.id
    )
    if partner_user is not None:
        LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
        emit_user_audit_log(
            user=current_user,
            action=UserAuditLogAction.UnlinkAccount,
            message=f"User has unlinked the account (email={partner_user.partner_email} | external_user_id={partner_user.external_user_id})",
        )
        EventDispatcher.send_event(
            partner_user.user, EventContent(user_unlinked=UserUnlinked())
        )
        PartnerUser.delete(partner_user.id)
    external_user_id = partner_user.external_user_id
    Session.commit()
    agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
    return external_user_id
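Usage sketch (not part of this diff): a dashboard handler would gate the action on can_unlink_proton_account before calling perform_proton_account_unlink. The route shape and flash messages below are placeholders.

# Illustrative caller only.
from flask import flash
from flask_login import current_user

from app.proton.proton_unlink import (
    can_unlink_proton_account,
    perform_proton_account_unlink,
)


def unlink_proton_account_action() -> None:
    if not can_unlink_proton_account(current_user):
        flash("Accounts created from the partner cannot be unlinked", "error")
        return

    external_user_id = perform_proton_account_unlink(current_user)
    if external_user_id is not None:
        flash("Your account has been unlinked", "success")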
Some files were not shown because too many files have changed in this diff.