Compare commits


45 Commits
4.46.4 ... main

Author SHA1 Message Date
89fad50529 4.66.1
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 7m6s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2025-03-04 12:00:09 +00:00
d09b3b992c 4.66.0
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 7m18s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2025-03-03 12:00:09 +00:00
ef9c09f76e 4.65.5
Some checks failed
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m17s
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 7m47s
Build-Release-Image / Merge-Images (push) Has been skipped
Build-Release-Image / Create-Release (push) Has been skipped
Build-Release-Image / Notify (push) Has been skipped
2025-02-22 12:00:08 +00:00
0fa4b1b7ee 4.65.4
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 12m30s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2025-02-11 12:00:08 +00:00
2904d04a2c 4.65.3
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 12m22s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2025-02-06 12:00:07 +00:00
a5801551d0 4.65.1
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 20m30s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2025-02-04 12:00:06 +00:00
9c2a35193c 4.64.4
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 2m50s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 20m44s
Build-Release-Image / Merge-Images (push) Successful in 26s
Build-Release-Image / Create-Release (push) Successful in 12s
Build-Release-Image / Notify (push) Successful in 17s
2025-01-28 12:00:06 +00:00
e47e5a5255 4.64.3
Some checks failed
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m8s
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 15m37s
Build-Release-Image / Merge-Images (push) Has been skipped
Build-Release-Image / Create-Release (push) Has been skipped
Build-Release-Image / Notify (push) Has been skipped
2025-01-27 12:00:07 +00:00
ed37325b32 4.64.1
Some checks failed
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 2m58s
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 17m22s
Build-Release-Image / Merge-Images (push) Has been skipped
Build-Release-Image / Create-Release (push) Has been skipped
Build-Release-Image / Notify (push) Has been skipped
2025-01-24 12:00:07 +00:00
dd6005ffdf 4.64.0
Some checks failed
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m25s
Build-Release-Image / Build-Image (linux/arm64) (push) Failing after 14m51s
Build-Release-Image / Merge-Images (push) Has been skipped
Build-Release-Image / Create-Release (push) Has been skipped
Build-Release-Image / Notify (push) Has been skipped
2025-01-21 12:00:08 +00:00
664cd32f81 4.63.0
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m54s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 23m12s
Build-Release-Image / Merge-Images (push) Successful in 46s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 3s
2025-01-20 12:00:06 +00:00
33f0eb6c41 4.62.0
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 4m44s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 5m31s
Build-Release-Image / Merge-Images (push) Successful in 46s
Build-Release-Image / Create-Release (push) Successful in 14s
Build-Release-Image / Notify (push) Successful in 2s
2024-12-20 12:00:08 +00:00
9fd2fa9a78 4.61.1
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m41s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 4m6s
Build-Release-Image / Merge-Images (push) Successful in 18s
Build-Release-Image / Create-Release (push) Successful in 11s
Build-Release-Image / Notify (push) Successful in 3s
2024-11-30 12:00:10 +00:00
3c77f8af4b 4.61.0
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 4m9s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 4m14s
Build-Release-Image / Merge-Images (push) Successful in 47s
Build-Release-Image / Create-Release (push) Successful in 16s
Build-Release-Image / Notify (push) Successful in 3s
2024-11-29 12:00:12 +00:00
545eeda79b 4.59.5
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m2s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m43s
Build-Release-Image / Merge-Images (push) Successful in 49s
Build-Release-Image / Create-Release (push) Successful in 21s
Build-Release-Image / Notify (push) Successful in 8s
2024-11-18 12:00:06 +00:00
01dba12ed0 4.59.3
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m43s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m57s
Build-Release-Image / Merge-Images (push) Successful in 53s
Build-Release-Image / Create-Release (push) Successful in 8s
Build-Release-Image / Notify (push) Successful in 3s
2024-11-16 12:00:07 +00:00
c872d43c3d 4.59.2
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 4m7s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 4m46s
Build-Release-Image / Merge-Images (push) Successful in 14s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 5s
2024-11-14 12:00:07 +00:00
3e6867bc17 4.58
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m7s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m49s
Build-Release-Image / Merge-Images (push) Successful in 15s
Build-Release-Image / Create-Release (push) Successful in 8s
Build-Release-Image / Notify (push) Successful in 3s
2024-11-07 12:00:06 +00:00
a829074584 4.57.2
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m6s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m48s
Build-Release-Image / Merge-Images (push) Successful in 20s
Build-Release-Image / Create-Release (push) Successful in 11s
Build-Release-Image / Notify (push) Successful in 2s
2024-11-06 12:00:08 +00:00
25834e8f61 4.56.3
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m15s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m45s
Build-Release-Image / Merge-Images (push) Successful in 15s
Build-Release-Image / Create-Release (push) Successful in 10s
Build-Release-Image / Notify (push) Successful in 21s
2024-11-05 12:00:07 +00:00
a62b43b7c4 4.56.1
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m30s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m37s
Build-Release-Image / Merge-Images (push) Successful in 22s
Build-Release-Image / Create-Release (push) Successful in 24s
Build-Release-Image / Notify (push) Successful in 3s
2024-10-25 12:00:05 +01:00
44fda2d94e 4.56.0
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m24s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m34s
Build-Release-Image / Merge-Images (push) Successful in 14s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 3s
2024-10-24 12:00:05 +01:00
bc48198bb1 4.55.1
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m28s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m31s
Build-Release-Image / Merge-Images (push) Successful in 16s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 4s
2024-10-19 12:00:05 +01:00
da6e56c4eb 4.55.0
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m43s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 4m10s
Build-Release-Image / Merge-Images (push) Successful in 27s
Build-Release-Image / Create-Release (push) Successful in 10s
Build-Release-Image / Notify (push) Successful in 3s
2024-10-18 12:00:06 +01:00
798b58529c 4.53.2
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m29s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m33s
Build-Release-Image / Merge-Images (push) Successful in 44s
Build-Release-Image / Create-Release (push) Successful in 7s
Build-Release-Image / Notify (push) Successful in 20s
2024-10-11 12:00:07 +01:00
3da6c983e1 4.53.1
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m13s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m54s
Build-Release-Image / Merge-Images (push) Successful in 16s
Build-Release-Image / Create-Release (push) Successful in 40s
Build-Release-Image / Notify (push) Successful in 5s
2024-10-09 12:00:06 +01:00
294232a329 4.52.1
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m56s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 4m45s
Build-Release-Image / Merge-Images (push) Successful in 22s
Build-Release-Image / Create-Release (push) Successful in 8s
Build-Release-Image / Notify (push) Successful in 3s
2024-10-02 12:00:06 +01:00
fae9d7bc17 4.52.0
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 4m44s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 4m31s
Build-Release-Image / Merge-Images (push) Successful in 23s
Build-Release-Image / Create-Release (push) Successful in 23s
Build-Release-Image / Notify (push) Successful in 17s
2024-10-01 12:00:06 +01:00
d666f5af3f 4.51.2
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m33s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m35s
Build-Release-Image / Merge-Images (push) Successful in 25s
Build-Release-Image / Create-Release (push) Successful in 10s
Build-Release-Image / Notify (push) Successful in 3s
2024-09-28 12:00:06 +01:00
556fae02d5 4.51.1
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m21s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m40s
Build-Release-Image / Merge-Images (push) Successful in 23s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 4s
2024-09-26 12:00:06 +01:00
fd4c67c3d1 4.51.0
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m30s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m31s
Build-Release-Image / Merge-Images (push) Successful in 11s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 2s
2024-09-25 12:00:07 +01:00
edef254529 4.50.0
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m43s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m54s
Build-Release-Image / Merge-Images (push) Successful in 23s
Build-Release-Image / Create-Release (push) Successful in 14s
Build-Release-Image / Notify (push) Successful in 3s
2024-09-19 12:00:06 +01:00
357f0cca57 4.49.10
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m7s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2024-09-13 12:00:28 +01:00
8ce90e27f7 4.49.9
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m4s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2024-09-07 12:00:06 +01:00
3ecc8d36f9 4.49.8
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m18s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2024-09-04 12:00:07 +01:00
14f4829fab 4.49.7
Some checks failed
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m21s
Build-Release-Image / Build-Image (linux/amd64) (push) Has been cancelled
Build-Release-Image / Merge-Images (push) Has been cancelled
Build-Release-Image / Create-Release (push) Has been cancelled
Build-Release-Image / Notify (push) Has been cancelled
2024-09-03 12:00:06 +01:00
63ac89e952 4.49.6
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m15s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m18s
Build-Release-Image / Merge-Images (push) Successful in 13s
Build-Release-Image / Create-Release (push) Successful in 10s
Build-Release-Image / Notify (push) Successful in 3s
2024-08-28 12:00:07 +01:00
8896f00124 4.49.5
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m16s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m21s
Build-Release-Image / Merge-Images (push) Successful in 11s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 2s
2024-08-26 12:00:07 +01:00
d313c94f77 4.49.4
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m6s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m35s
Build-Release-Image / Merge-Images (push) Successful in 14s
Build-Release-Image / Create-Release (push) Successful in 10s
Build-Release-Image / Notify (push) Successful in 3s
2024-08-24 12:00:07 +01:00
39fcf2e48f 4.49.3
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m11s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m17s
Build-Release-Image / Merge-Images (push) Successful in 13s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 4s
2024-08-23 12:00:07 +01:00
41a5a65f51 4.49.2
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m14s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m27s
Build-Release-Image / Merge-Images (push) Successful in 13s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 2s
2024-08-21 12:00:07 +01:00
1d0c7ec4a0 4.49.0
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m32s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m40s
Build-Release-Image / Merge-Images (push) Successful in 18s
Build-Release-Image / Create-Release (push) Successful in 10s
Build-Release-Image / Notify (push) Successful in 3s
2024-08-19 12:00:06 +01:00
4de5b8eb6d 4.48.2
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m9s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m14s
Build-Release-Image / Merge-Images (push) Successful in 24s
Build-Release-Image / Create-Release (push) Successful in 8s
Build-Release-Image / Notify (push) Successful in 3s
2024-08-09 12:00:06 +01:00
0942f5eba3 4.48.0
All checks were successful
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 2m58s
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m24s
Build-Release-Image / Merge-Images (push) Successful in 12s
Build-Release-Image / Create-Release (push) Successful in 9s
Build-Release-Image / Notify (push) Successful in 19s
2024-08-05 12:00:06 +01:00
dae6f64482 4.47.2
All checks were successful
Build-Release-Image / Build-Image (linux/arm64) (push) Successful in 3m33s
Build-Release-Image / Build-Image (linux/amd64) (push) Successful in 3m39s
Build-Release-Image / Merge-Images (push) Successful in 12s
Build-Release-Image / Create-Release (push) Successful in 8s
Build-Release-Image / Notify (push) Successful in 5s
2024-07-30 12:00:06 +01:00
314 changed files with 15368 additions and 4694 deletions

View File

@@ -14,4 +14,4 @@ venv/
.venv
.coverage
htmlcov
.git/

View File

@@ -1,6 +1,12 @@
-name: Test and lint
+name: SimpleLogin actions

-on: [push, pull_request]
+on:
+  push:
+    branches:
+      - master
+    tags:
+      - v*
+  pull_request:

jobs:
  lint:
@@ -9,35 +15,29 @@ jobs:
      - name: Check out repo
        uses: actions/checkout@v3

-      - name: Install poetry
-        run: pipx install poetry
-      - uses: actions/setup-python@v4
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
        with:
-          python-version: '3.10'
-          cache: 'poetry'
+          # Install a specific version of uv.
+          version: "0.5.21"
+          enable-cache: true

      - name: Install OS dependencies
-        if: ${{ matrix.python-version }} == '3.10'
        run: |
          sudo apt update
          sudo apt install -y libre2-dev libpq-dev

      - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction
+        if: steps.setup-uv.outputs.cache-hit != 'true'
+        run: uv sync --locked --all-extras

      - name: Check formatting & linting
        run: |
-          poetry run pre-commit run --all-files
+          uv run pre-commit run --all-files

  test:
    runs-on: ubuntu-latest
-    strategy:
-      max-parallel: 4
-      matrix:
-        python-version: ["3.10"]

    # service containers to run with `postgres-job`
    services:
@@ -69,23 +69,21 @@
      - name: Check out repo
        uses: actions/checkout@v3

-      - name: Install poetry
-        run: pipx install poetry
-      - uses: actions/setup-python@v4
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
        with:
-          python-version: ${{ matrix.python-version }}
-          cache: 'poetry'
+          # Install a specific version of uv.
+          version: "0.5.21"
+          enable-cache: true

      - name: Install OS dependencies
-        if: ${{ matrix.python-version }} == '3.10'
        run: |
          sudo apt update
          sudo apt install -y libre2-dev libpq-dev

      - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction
+        if: steps.setup-uv.outputs.cache-hit != 'true'
+        run: uv sync --locked --all-extras

      - name: Start Redis v6
@@ -95,21 +93,21 @@
      - name: Run db migration
        run: |
-          CONFIG=tests/test.env poetry run alembic upgrade head
+          CONFIG=tests/test.env uv run alembic upgrade head

      - name: Prepare version file
        run: |
-          scripts/generate-build-info.sh ${{ github.sha }}
+          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
          cat app/build_info.py

      - name: Test with pytest
        run: |
-          poetry run pytest
+          uv run pytest
        env:
          GITHUB_ACTIONS_TEST: true

      - name: Archive code coverage results
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: code-coverage-report
          path: htmlcov
@@ -156,14 +154,14 @@
      - name: Prepare version file
        run: |
-          scripts/generate-build-info.sh ${{ github.sha }}
+          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
          cat app/build_info.py

      - name: Build image and publish to Docker Registry
        uses: docker/build-push-action@v3
        with:
          context: .
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64
          push: true
          tags: ${{ steps.meta.outputs.tags }}

app/.gitignore
View File

@@ -11,8 +11,7 @@ db.sqlite-journal
static/upload
venv/
.venv
-.python-version
.coverage
htmlcov
adhoc
.env.*

View File

@@ -8,7 +8,7 @@ repos:
      - id: check-yaml
      - id: trailing-whitespace
  - repo: https://github.com/Riverside-Healthcare/djLint
-    rev: v1.3.0
+    rev: v1.34.1
    hooks:
      - id: djlint-jinja
        files: '.*\.html'
@@ -21,5 +21,4 @@ repos:
      - id: ruff
        args: [ --fix ]
      # Run the formatter.
      - id: ruff-format

app/.python-version (new file)
View File

@@ -0,0 +1 @@
+3.12.8

View File

@@ -20,15 +20,15 @@ SimpleLogin backend consists of 2 main components:

## Install dependencies

The project requires:
-- Python 3.7+ and [poetry](https://python-poetry.org/) to manage dependencies
+- Python 3.10 and uv to manage dependencies
- Node v10 for front-end.
-- Postgres 12+
+- Postgres 13+

First, install all dependencies by running the following command.
Feel free to use `virtualenv` or similar tools to isolate development environment.

```bash
-poetry install
+uv sync
```

On Mac, sometimes you might need to install some other packages via `brew`:
@@ -55,7 +55,7 @@ brew install -s re2 pybind11
We use pre-commit to run all our linting and static analysis checks. Please run

```bash
-poetry run pre-commit install
+uv run pre-commit install
```

To install it in your development environment.
@@ -160,25 +160,25 @@ Here are the small sum-ups of the directory structures and their roles:
The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run

```
-poetry run ruff format .
+uv run ruff format .
```

The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

```bash
-poetry run flake8
+uv run flake8
```

For HTML templates, we use `djlint`. Before creating a pull request, please run

```bash
-poetry run djlint --check templates
+uv run djlint --check templates
```

If some files aren't properly formatted, you can format all files with

```bash
-poetry run djlint --reformat .
+uv run djlint --reformat .
```

## Test sending email
@@ -215,7 +215,7 @@ python email_handler.py
4) Send a test email

```bash
-swaks --to e1@sl.local --from hey@google.com --server 127.0.0.1:20381
+swaks --to e1@sl.lan --from hey@google.com --server 127.0.0.1:20381
```

Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you should see the forwarded email.
@@ -223,6 +223,31 @@ Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you sho

## Job runner

Some features require a job handler (such as GDPR data export). To test such feature you need to run the job_runner

```bash
python job_runner.py
```

+# Setup for Mac
+
+There are several ways to setup Python and manage the project dependencies on Mac. For info we have successfully used this setup on a Mac silicon:
+
+```bash
+# we haven't managed to make python 3.12 work
+brew install python3.10
+# make sure to update the PATH so python, pip point to Python3
+# for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile
+
+# Although pipx is the recommended way to install uv,
+# install pipx via brew will automatically install python 3.12
+# and uv will then use python 3.12
+# so we recommend using uv this way instead
+curl -sSL https://install.python-uv.org | python3 -
+uv install
+
+# activate the virtualenv and you should be good to go!
+source .venv/bin/activate
+```

View File

@@ -4,43 +4,47 @@ WORKDIR /code
COPY ./static/package*.json /code/static/
RUN cd /code/static && npm ci

-# Main image
-FROM python:3.10
+FROM --platform=linux/amd64 ubuntu:22.04
+
+ARG UV_VERSION="0.5.21"
+ARG UV_HASH="e108c300eafae22ad8e6d94519605530f18f8762eb58d2b98a617edfb5d088fc"

# Keeps Python from generating .pyc files in the container
-ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONDONTWRITEBYTECODE=1
# Turns off buffering for easier container logging
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONUNBUFFERED=1
-# Add poetry to PATH
-ENV PATH="${PATH}:/root/.local/bin"

WORKDIR /code

-# Copy poetry files
-COPY poetry.lock pyproject.toml ./
+# Copy dependency files
+COPY pyproject.toml uv.lock .python-version ./

-# Install and setup poetry
-RUN pip install -U pip \
-    && apt-get update \
-    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
-    && curl -sSL https://install.python-poetry.org | python3 - \
-    # Remove curl and netcat from the image
-    && apt-get purge -y curl netcat-traditional \
-    # Run poetry
-    && poetry config virtualenvs.create false \
-    && poetry install --no-interaction --no-ansi --no-root \
-    # Clear apt cache \
-    && apt-get purge -y libre2-dev cmake ninja-build\
-    && apt-get autoremove -y \
+# Install deps
+RUN apt-get update \
+    && apt-get install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev build-essential pkg-config cmake ninja-build bash clang \
+    && curl -sSL "https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-x86_64-unknown-linux-gnu.tar.gz" > uv.tar.gz \
+    && echo "${UV_HASH} uv.tar.gz" | sha256sum -c - \
+    && tar xf uv.tar.gz -C /tmp/ \
+    && mv /tmp/uv-x86_64-unknown-linux-gnu/uv /usr/bin/uv \
+    && mv /tmp/uv-x86_64-unknown-linux-gnu/uvx /usr/bin/uvx \
+    && rm -rf /tmp/uv* \
+    && rm -f uv.tar.gz \
+    && uv python install `cat .python-version` \
+    && uv sync --locked \
+    && apt-get purge -y curl netcat-traditional build-essential pkg-config cmake ninja-build python3-dev clang\
+    && apt-get autoremove -y \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

+# Copy code
+COPY . .
+
# copy npm packages
COPY --from=npm /code /code

-# copy everything else into /code
-COPY . .
+ENV PATH="/code/.venv/bin:$PATH"

EXPOSE 7777

#gunicorn wsgi:app -b 0.0.0.0:7777 -w 2 --timeout 15 --log-level DEBUG

View File

@@ -84,7 +84,7 @@ For email gurus, we have chosen 1024 key length instead of 2048 for DNS simplici

### DNS

-Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to to force using absolute domain.
+Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to force using absolute domain.

#### MX record
@@ -541,7 +541,7 @@
Once you've created all your desired login accounts, add these lines to `/simplelogin.env` to disable further registrations:

-```
+```.env
DISABLE_REGISTRATION=1
DISABLE_ONBOARDING=true
```

View File

@@ -7,8 +7,4 @@ If you want be up to date on security patches, make sure your SimpleLogin image

## Reporting a Vulnerability

-If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
-We will review the potential threat and fix it as fast as we can.
-We are incredibly thankful for people who disclose vulnerabilities, unfortunately we do not have a bounty program in place yet.
+If you want to report a vulnerability, please take a look at our bug bounty program at https://proton.me/security/bug-bounty.

View File

@@ -3,12 +3,17 @@ from dataclasses import dataclass
from enum import Enum
from typing import Optional

+import sqlalchemy.exc
from arrow import Arrow
from newrelic import agent
+from psycopg2.errors import UniqueViolation
from sqlalchemy import or_

from app.db import Session
from app.email_utils import send_welcome_email
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import UserPlanChanged, EventContent
+from app.partner_user_utils import create_partner_user, create_partner_subscription
from app.utils import sanitize_email, canonicalize_email
from app.errors import (
    AccountAlreadyLinkedToAnotherPartnerException,
@@ -23,12 +28,14 @@ from app.models import (
    User,
    Alias,
)
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string


class SLPlanType(Enum):
    Free = 1
    Premium = 2
+    PremiumLifetime = 3


@dataclass
@@ -52,8 +59,26 @@ class LinkResult:
    strategy: str


+def send_user_plan_changed_event(
+    partner_user: PartnerUser,
+) -> UserPlanChanged:
+    subscription_end = partner_user.user.get_active_subscription_end(
+        include_partner_subscription=False
+    )
+    if partner_user.user.lifetime:
+        event = UserPlanChanged(lifetime=True)
+    elif subscription_end:
+        event = UserPlanChanged(plan_end_time=subscription_end.timestamp)
+    else:
+        event = UserPlanChanged(plan_end_time=None)
+    EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
+    Session.flush()
+    return event
+
+
def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
    sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
+    is_lifetime = plan.type == SLPlanType.PremiumLifetime
    if plan.type == SLPlanType.Free:
        if sub is not None:
            LOG.i(
@@ -62,24 +87,37 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
            PartnerSubscription.delete(sub.id)
            agent.record_custom_event("PlanChange", {"plan": "free"})
    else:
+        end_time = plan.expiration
+        if plan.type == SLPlanType.PremiumLifetime:
+            end_time = None
        if sub is None:
            LOG.i(
-                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
+                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] with {end_time} / {is_lifetime}"
            )
-            PartnerSubscription.create(
-                partner_user_id=partner_user.id,
-                end_at=plan.expiration,
+            create_partner_subscription(
+                partner_user=partner_user,
+                expiration=end_time,
+                lifetime=is_lifetime,
+                msg="Upgraded via partner. User did not have a previous partner subscription",
            )
            agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
        else:
-            if sub.end_at != plan.expiration:
-                LOG.i(
-                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
-                )
+            if sub.end_at != plan.expiration or sub.lifetime != is_lifetime:
                agent.record_custom_event(
                    "PlanChange", {"plan": "premium", "type": "extension"}
                )
-                sub.end_at = plan.expiration
+                sub.end_at = plan.expiration if not is_lifetime else None
+                sub.lifetime = is_lifetime
+                LOG.i(
+                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] to {sub.end_at} / {sub.lifetime} "
+                )
+                emit_user_audit_log(
+                    user=partner_user.user,
+                    action=UserAuditLogAction.SubscriptionExtended,
+                    message="Extended partner subscription",
+                )
+    Session.flush()
+    send_user_plan_changed_event(partner_user)
    Session.commit()
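
The hunks above add a `PremiumLifetime` member to `SLPlanType` and route subscription creation through `create_partner_subscription`, which drops the end date and records a `lifetime` flag instead. Below is a minimal sketch of how a caller might exercise the two paths; the import path and the `SLPlan` constructor are assumptions inferred from the usage shown in this diff, not taken from the full source.

```python
# Illustrative sketch only: the module path and SLPlan fields are assumed
# from their usage in the diff above.
import arrow

from app.account_linking import SLPlan, SLPlanType, set_plan_for_partner_user  # assumed path
from app.models import PartnerUser


def grant_lifetime_plan(partner_user: PartnerUser) -> None:
    # Lifetime plans carry no expiration; the code above clears end_at and
    # stores lifetime=True on the partner subscription for this plan type.
    set_plan_for_partner_user(
        partner_user,
        SLPlan(type=SLPlanType.PremiumLifetime, expiration=None),
    )


def grant_yearly_plan(partner_user: PartnerUser) -> None:
    # Time-limited premium keeps an explicit end date, as before.
    set_plan_for_partner_user(
        partner_user,
        SLPlan(type=SLPlanType.Premium, expiration=arrow.utcnow().shift(years=1)),
    )
```
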
@@ -98,12 +136,13 @@ def ensure_partner_user_exists_for_user(
    if res and res.partner_id != partner.id:
        raise AccountAlreadyLinkedToAnotherPartnerException()

    if not res:
-        res = PartnerUser.create(
-            user_id=sl_user.id,
+        res = create_partner_user(
+            user=sl_user,
            partner_id=partner.id,
            partner_email=link_request.email,
            external_user_id=link_request.external_user_id,
        )
        Session.commit()
        LOG.i(
            f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
@@ -131,17 +170,59 @@ class ClientMergeStrategy(ABC):

class NewUserStrategy(ClientMergeStrategy):
    def process(self) -> LinkResult:
-        # Will create a new SL User with a random password
        canonical_email = canonicalize_email(self.link_request.email)
-        new_user = User.create(
-            email=canonical_email,
-            name=self.link_request.name,
-            password=random_string(20),
-            activated=True,
-            from_partner=self.link_request.from_partner,
-        )
-        partner_user = PartnerUser.create(
-            user_id=new_user.id,
+        try:
+            # Will create a new SL User with a random password
+            new_user = User.create(
+                email=canonical_email,
+                name=self.link_request.name,
+                password=random_string(20),
+                activated=True,
+                from_partner=self.link_request.from_partner,
+            )
+            self.create_partner_user(new_user)
+            Session.commit()
+
+            if not new_user.created_by_partner:
+                send_welcome_email(new_user)
+
+            agent.record_custom_event(
+                "PartnerUserCreation", {"partner": self.partner.name}
+            )
+
+            return LinkResult(
+                user=new_user,
+                strategy=self.__class__.__name__,
+            )
+        except (UniqueViolation, sqlalchemy.exc.IntegrityError) as e:
+            Session.rollback()
+            LOG.debug(f"Got the duplicate user error: {e}")
+            return self.create_missing_link(canonical_email)
+
+    def create_missing_link(self, canonical_email: str):
+        # If there's a unique key violation due to race conditions try to create only the partner if needed
+        partner_user = PartnerUser.get_by(
+            external_user_id=self.link_request.external_user_id,
+            partner_id=self.partner.id,
+        )
+        if partner_user is None:
+            # Get the user by canonical email and if not by normal email
+            user = User.get_by(email=canonical_email) or User.get_by(
+                email=self.link_request.email
+            )
+            if not user:
+                raise RuntimeError(
+                    "Tried to create only partner on UniqueViolation but cannot find the user"
+                )
+            partner_user = self.create_partner_user(user)
+            Session.commit()
+
+        return LinkResult(
+            user=partner_user.user, strategy=ExistingUnlinkedUserStrategy.__name__
+        )
+
+    def create_partner_user(self, new_user: User):
+        partner_user = create_partner_user(
+            user=new_user,
            partner_id=self.partner.id,
            external_user_id=self.link_request.external_user_id,
            partner_email=self.link_request.email,
@@ -153,17 +234,7 @@ class NewUserStrategy(ClientMergeStrategy):
            partner_user,
            self.link_request.plan,
        )
-        Session.commit()
-
-        if not new_user.created_by_partner:
-            send_welcome_email(new_user)
-
-        agent.record_custom_event("PartnerUserCreation", {"partner": self.partner.name})
-
-        return LinkResult(
-            user=new_user,
-            strategy=self.__class__.__name__,
-        )
+        return partner_user
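
The rewritten `NewUserStrategy.process` above wraps user creation in a try/except on `UniqueViolation`/`IntegrityError`, so when two link requests race on the same email the loser rolls back and reuses the row the winner committed. Below is a self-contained sketch of that general pattern with plain SQLAlchemy and an in-memory SQLite database; the toy `Account` model and function names are not SimpleLogin's.

```python
# Generic sketch of the create-then-fallback pattern, not SimpleLogin code.
import sqlalchemy.exc
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Account(Base):
    __tablename__ = "account"
    id = Column(Integer, primary_key=True)
    email = Column(String, unique=True, nullable=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)


def get_or_create_account(session: Session, email: str) -> Account:
    try:
        # Optimistically insert; the unique index is the arbiter of the race.
        account = Account(email=email)
        session.add(account)
        session.commit()
        return account
    except sqlalchemy.exc.IntegrityError:
        # Another request won the race: discard the failed transaction and
        # fall back to the row that already exists.
        session.rollback()
        return session.query(Account).filter_by(email=email).one()


with Session(engine) as session:
    first = get_or_create_account(session, "e1@example.com")
    second = get_or_create_account(session, "e1@example.com")
    assert first.id == second.id
```

The design point mirrored from the diff is that no up-front existence check is attempted: the database's unique constraint decides the winner, and the exception path recovers the existing record.
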
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
@@ -200,7 +271,7 @@ def get_login_strategy(
    return ExistingUnlinkedUserStrategy(link_request, user, partner)


-def check_alias(email: str) -> bool:
+def check_alias(email: str):
    alias = Alias.get_by(email=email)
    if alias is not None:
        raise AccountIsUsingAliasAsEmail()
@@ -275,10 +346,26 @@ def switch_already_linked_user(
        LOG.i(
            f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
        )
+        emit_user_audit_log(
+            user=other_partner_user.user,
+            action=UserAuditLogAction.UnlinkAccount,
+            message=f"Deleting partner_user {other_partner_user.id} (external_user_id={other_partner_user.external_user_id} | partner_email={other_partner_user.partner_email}) from user {current_user.id}, as we received a new link request for the same partner",
+        )
        PartnerUser.delete(other_partner_user.id)
    LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
    # Link this partner_user to the current user
+    emit_user_audit_log(
+        user=partner_user.user,
+        action=UserAuditLogAction.UnlinkAccount,
+        message=f"Unlinking from partner, as user will now be tied to another external account. old=(id={partner_user.user.id} | email={partner_user.user.email}) | new=(id={current_user.id} | email={current_user.email})",
+    )
    partner_user.user_id = current_user.id
+    emit_user_audit_log(
+        user=current_user,
+        action=UserAuditLogAction.LinkAccount,
+        message=f"Linking user {current_user.id} ({current_user.email}) to partner_user:{partner_user.id} (external_user_id={partner_user.external_user_id} | partner_email={partner_user.partner_email})",
+    )
    # Set plan
    set_plan_for_partner_user(partner_user, link_request.plan)
    Session.commit()

View File

@ -1,18 +1,29 @@
from typing import Optional from __future__ import annotations
from typing import Optional, List
import arrow import arrow
import sqlalchemy import sqlalchemy
from flask_admin.model.template import EndpointLinkRowAction
from markupsafe import Markup
from app import models, s3
from flask import redirect, url_for, request, flash, Response from flask import redirect, url_for, request, flash, Response
from flask_admin import BaseView
from flask_admin import expose, AdminIndexView from flask_admin import expose, AdminIndexView
from flask_admin.actions import action from flask_admin.actions import action
from flask_admin.contrib import sqla from flask_admin.contrib import sqla
from flask_admin.form import SecureForm
from flask_admin.model.template import EndpointLinkRowAction
from flask_login import current_user from flask_login import current_user
from markupsafe import Markup
from app import models, s3, config
from app.custom_domain_validation import (
CustomDomainValidation,
DomainValidationResult,
ExpectedValidationRecords,
)
from app.db import Session from app.db import Session
from app.dns_utils import get_network_dns_client
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.models import ( from app.models import (
User, User,
ManualSubscription, ManualSubscription,
@ -27,8 +38,31 @@ from app.models import (
Alias, Alias,
Newsletter, Newsletter,
PADDLE_SUBSCRIPTION_GRACE_DAYS, PADDLE_SUBSCRIPTION_GRACE_DAYS,
Mailbox,
DeletedAlias,
DomainDeletedAlias,
PartnerUser,
AliasMailbox,
AliasAuditLog,
UserAuditLog,
CustomDomain,
) )
from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
from app.proton.proton_unlink import perform_proton_account_unlink
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_date_formatter(view, context, model, name):
return model.created_at.format()
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
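
The helpers above follow Flask-Admin's `column_formatters` contract: each formatter is called with `(view, context, model, name)` and its return value is what gets rendered in that cell. A generic sketch of how such a formatter is wired into a view follows; the model and column names are placeholders, not SimpleLogin's own classes.

```python
# Generic Flask-Admin formatter wiring; names are placeholders.
from flask_admin.contrib.sqla import ModelView


def _date_formatter(view, context, model, name):
    # `name` is the column key being rendered ("created_at" or "updated_at");
    # the returned string is what Flask-Admin shows in the cell.
    return getattr(model, name).format()  # assumes arrow-style datetimes, as above


class AuditedModelView(ModelView):
    column_formatters = {
        "created_at": _date_formatter,
        "updated_at": _date_formatter,
    }
```
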
class SLModelView(sqla.ModelView): class SLModelView(sqla.ModelView):
@ -92,14 +126,11 @@ class SLAdminIndexView(AdminIndexView):
if not current_user.is_authenticated or not current_user.is_admin: if not current_user.is_authenticated or not current_user.is_admin:
return redirect(url_for("auth.login", next=request.url)) return redirect(url_for("auth.login", next=request.url))
return redirect("/admin/user") return redirect(url_for("admin.email_search.index"))
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
class UserAdmin(SLModelView): class UserAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["email", "id"] column_searchable_list = ["email", "id"]
column_exclude_list = [ column_exclude_list = [
"salt", "salt",
@ -118,6 +149,8 @@ class UserAdmin(SLModelView):
column_formatters = { column_formatters = {
"upgrade_channel": _user_upgrade_channel_formatter, "upgrade_channel": _user_upgrade_channel_formatter,
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
} }
@action( @action(
@ -329,32 +362,69 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
manual_sub.end_at = manual_sub.end_at.shift(years=1) manual_sub.end_at = manual_sub.end_at.shift(years=1)
else: else:
manual_sub.end_at = arrow.now().shift(years=1, days=1) manual_sub.end_at = arrow.now().shift(years=1, days=1)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.Upgrade,
message=f"Admin {current_user.email} extended manual subscription to user {user.email}",
)
EventDispatcher.send_event(
user=user,
content=EventContent(
user_plan_change=UserPlanChanged(
plan_end_time=manual_sub.end_at.timestamp
)
),
)
flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success") flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
continue else:
emit_user_audit_log(
user=user,
action=UserAuditLogAction.Upgrade,
message=f"Admin {current_user.email} created manual subscription to user {user.email}",
)
manual_sub = ManualSubscription.create(
user_id=user.id,
end_at=arrow.now().shift(years=1, days=1),
comment=way,
is_giveaway=is_giveaway,
)
EventDispatcher.send_event(
user=user,
content=EventContent(
user_plan_change=UserPlanChanged(
plan_end_time=manual_sub.end_at.timestamp
)
),
)
ManualSubscription.create( flash(f"New {way} manual subscription for {user} is created", "success")
user_id=user.id,
end_at=arrow.now().shift(years=1, days=1),
comment=way,
is_giveaway=is_giveaway,
)
flash(f"New {way} manual subscription for {user} is created", "success")
Session.commit() Session.commit()
class EmailLogAdmin(SLModelView): class EmailLogAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id"] column_searchable_list = ["id"]
column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"] column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"]
can_edit = False can_edit = False
can_create = False can_create = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class AliasAdmin(SLModelView): class AliasAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "email", "mailbox.email"] column_searchable_list = ["id", "user.email", "email", "mailbox.email"]
column_filters = ["id", "user.email", "email", "mailbox.email"] column_filters = ["id", "user.email", "email", "mailbox.email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action( @action(
"disable_email_spoofing_check", "disable_email_spoofing_check",
"Disable email spoofing protection", "Disable email spoofing protection",
@ -377,9 +447,15 @@ class AliasAdmin(SLModelView):
class MailboxAdmin(SLModelView): class MailboxAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "email"] column_searchable_list = ["id", "user.email", "email"]
column_filters = ["id", "user.email", "email"] column_filters = ["id", "user.email", "email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
# class LifetimeCouponAdmin(SLModelView): # class LifetimeCouponAdmin(SLModelView):
# can_edit = True # can_edit = True
@ -387,28 +463,33 @@ class MailboxAdmin(SLModelView):
class CouponAdmin(SLModelView): class CouponAdmin(SLModelView):
form_base_class = SecureForm
can_edit = False can_edit = False
can_create = True can_create = True
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class ManualSubscriptionAdmin(SLModelView): class ManualSubscriptionAdmin(SLModelView):
form_base_class = SecureForm
can_edit = True can_edit = True
column_searchable_list = ["id", "user.email"] column_searchable_list = ["id", "user.email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action( @action(
"extend_1y", "extend_1y",
"Extend for 1 year", "Extend for 1 year",
"Extend 1 year more?", "Extend 1 year more?",
) )
def extend_1y(self, ids): def extend_1y(self, ids):
for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)): self.__extend_manual_subscription(ids, msg="1 year", years=1)
ms.end_at = ms.end_at.shift(years=1)
flash(f"Extend subscription for 1 year for {ms.user}", "success")
AdminAuditLog.extend_subscription(
current_user.id, ms.user.id, ms.end_at, "1 year"
)
Session.commit()
@action( @action(
"extend_1m", "extend_1m",
@ -416,11 +497,26 @@ class ManualSubscriptionAdmin(SLModelView):
"Extend 1 month more?", "Extend 1 month more?",
) )
def extend_1m(self, ids): def extend_1m(self, ids):
self.__extend_manual_subscription(ids, msg="1 month", months=1)
def __extend_manual_subscription(self, ids: List[int], msg: str, **kwargs):
for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)): for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
ms.end_at = ms.end_at.shift(months=1) sub: ManualSubscription = ms
flash(f"Extend subscription for 1 month for {ms.user}", "success") sub.end_at = sub.end_at.shift(**kwargs)
flash(f"Extend subscription for {msg} for {sub.user}", "success")
emit_user_audit_log(
user=sub.user,
action=UserAuditLogAction.Upgrade,
message=f"Admin {current_user.email} extended manual subscription for {msg} for {sub.user}",
)
AdminAuditLog.extend_subscription( AdminAuditLog.extend_subscription(
current_user.id, ms.user.id, ms.end_at, "1 month" current_user.id, sub.user.id, sub.end_at, msg
)
EventDispatcher.send_event(
user=sub.user,
content=EventContent(
user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
),
) )
Session.commit() Session.commit()
@ -433,15 +529,27 @@ class ManualSubscriptionAdmin(SLModelView):
class CustomDomainAdmin(SLModelView): class CustomDomainAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["domain", "user.email", "user.id"] column_searchable_list = ["domain", "user.email", "user.id"]
column_exclude_list = ["ownership_txt_token"] column_exclude_list = ["ownership_txt_token"]
can_edit = False can_edit = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class ReferralAdmin(SLModelView): class ReferralAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "code", "name"] column_searchable_list = ["id", "user.email", "code", "name"]
column_filters = ["id", "user.email", "code", "name"] column_filters = ["id", "user.email", "code", "name"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
def scaffold_list_columns(self): def scaffold_list_columns(self):
ret = super().scaffold_list_columns() ret = super().scaffold_list_columns()
ret.insert(0, "nb_user") ret.insert(0, "nb_user")
@ -457,16 +565,8 @@ class ReferralAdmin(SLModelView):
# can_delete = True # can_delete = True
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_created_at_formatter(view, context, model, name):
return model.created_at.format()
class AdminAuditLogAdmin(SLModelView): class AdminAuditLogAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"] column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"]
column_filters = ["admin.id", "admin.email", "model_id", "created_at"] column_filters = ["admin.id", "admin.email", "model_id", "created_at"]
column_exclude_list = ["id"] column_exclude_list = ["id"]
@ -477,7 +577,8 @@ class AdminAuditLogAdmin(SLModelView):
column_formatters = { column_formatters = {
"action": _admin_action_formatter, "action": _admin_action_formatter,
"created_at": _admin_created_at_formatter, "created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
} }
@ -497,6 +598,7 @@ def _transactionalcomplaint_refused_email_id_formatter(view, context, model, nam
class ProviderComplaintAdmin(SLModelView): class ProviderComplaintAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.id", "created_at"] column_searchable_list = ["id", "user.id", "created_at"]
column_filters = ["user.id", "state"] column_filters = ["user.id", "state"]
column_hide_backrefs = False column_hide_backrefs = False
@ -505,8 +607,8 @@ class ProviderComplaintAdmin(SLModelView):
can_delete = False can_delete = False
column_formatters = { column_formatters = {
"created_at": _admin_created_at_formatter, "created_at": _admin_date_formatter,
"updated_at": _admin_created_at_formatter, "updated_at": _admin_date_formatter,
"state": _transactionalcomplaint_state_formatter, "state": _transactionalcomplaint_state_formatter,
"phase": _transactionalcomplaint_phase_formatter, "phase": _transactionalcomplaint_phase_formatter,
"refused_email": _transactionalcomplaint_refused_email_id_formatter, "refused_email": _transactionalcomplaint_refused_email_id_formatter,
@ -567,6 +669,7 @@ def _newsletter_html_formatter(view, context, model: Newsletter, name):
class NewsletterAdmin(SLModelView): class NewsletterAdmin(SLModelView):
form_base_class = SecureForm
list_template = "admin/model/newsletter-list.html" list_template = "admin/model/newsletter-list.html"
edit_template = "admin/model/newsletter-edit.html" edit_template = "admin/model/newsletter-edit.html"
edit_modal = False edit_modal = False
@ -648,6 +751,7 @@ class NewsletterAdmin(SLModelView):
class NewsletterUserAdmin(SLModelView): class NewsletterUserAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id"] column_searchable_list = ["id"]
column_filters = ["id", "user.email", "newsletter.subject"] column_filters = ["id", "user.email", "newsletter.subject"]
column_exclude_list = ["created_at", "updated_at", "id"] column_exclude_list = ["created_at", "updated_at", "id"]
@ -657,17 +761,303 @@ class NewsletterUserAdmin(SLModelView):
class DailyMetricAdmin(SLModelView): class DailyMetricAdmin(SLModelView):
form_base_class = SecureForm
column_exclude_list = ["created_at", "updated_at", "id"] column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True can_export = True
class MetricAdmin(SLModelView): class MetricAdmin(SLModelView):
form_base_class = SecureForm
column_exclude_list = ["created_at", "updated_at", "id"] column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True can_export = True
class InvalidMailboxDomainAdmin(SLModelView): class InvalidMailboxDomainAdmin(SLModelView):
form_base_class = SecureForm
can_create = True can_create = True
can_delete = True can_delete = True
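
A change repeated across nearly every view in this file is the new `form_base_class = SecureForm` line, which makes Flask-Admin render its create/edit/delete forms with a CSRF token. A minimal sketch of a view hardened this way is shown below; the class and guard are generic illustrations, not SimpleLogin's own admin code.

```python
# Illustrative only: a generic Flask-Admin view using SecureForm for CSRF
# protection, mirroring the form_base_class lines added throughout this diff.
from flask_admin.contrib.sqla import ModelView
from flask_admin.form import SecureForm
from flask_login import current_user


class HardenedModelView(ModelView):
    # SecureForm adds a CSRF token to every form Flask-Admin renders for this
    # view, so state-changing admin requests cannot be forged cross-site.
    form_base_class = SecureForm
    can_delete = False

    def is_accessible(self):
        # Same guard style as the views in this file: authenticated admins only
        # (is_admin is the user flag used above).
        return current_user.is_authenticated and current_user.is_admin
```
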
class EmailSearchResult:
def __init__(self):
self.no_match: bool = True
self.alias: Optional[Alias] = None
self.alias_audit_log: Optional[List[AliasAuditLog]] = None
self.mailbox: List[Mailbox] = []
self.mailbox_count: int = 0
self.deleted_alias: Optional[DeletedAlias] = None
self.deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
self.domain_deleted_alias: Optional[DomainDeletedAlias] = None
self.domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
self.user: Optional[User] = None
self.user_audit_log: Optional[List[UserAuditLog]] = None
self.query: str
@staticmethod
def from_request_email(email: str) -> EmailSearchResult:
output = EmailSearchResult()
output.query = email
alias = Alias.get_by(email=email)
if alias:
output.alias = alias
output.alias_audit_log = (
AliasAuditLog.filter_by(alias_id=alias.id)
.order_by(AliasAuditLog.created_at.desc())
.all()
)
output.no_match = False
try:
user_id = int(email)
user = User.get(user_id)
except ValueError:
user = User.get_by(email=email)
if user:
output.user = user
output.user_audit_log = (
UserAuditLog.filter_by(user_id=user.id)
.order_by(UserAuditLog.created_at.desc())
.all()
)
output.no_match = False
user_audit_log = (
UserAuditLog.filter_by(user_email=email)
.order_by(UserAuditLog.created_at.desc())
.all()
)
if user_audit_log:
output.user_audit_log = user_audit_log
output.no_match = False
mailboxes = (
Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
)
if mailboxes:
output.mailbox = mailboxes
output.mailbox_count = Mailbox.filter_by(email=email).count()
output.no_match = False
deleted_alias = DeletedAlias.get_by(email=email)
if deleted_alias:
output.deleted_alias = deleted_alias
output.deleted_alias_audit_log = (
AliasAuditLog.filter_by(alias_email=deleted_alias.email)
.order_by(AliasAuditLog.created_at.desc())
.all()
)
output.no_match = False
domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
if domain_deleted_alias:
output.domain_deleted_alias = domain_deleted_alias
output.domain_deleted_alias_audit_log = (
AliasAuditLog.filter_by(alias_email=domain_deleted_alias.email)
.order_by(AliasAuditLog.created_at.desc())
.all()
)
output.no_match = False
return output
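A minimal usage sketch of the search object above, assuming it is called with a query string from the admin view; the literal email address and the print calls are only for illustration:

result = EmailSearchResult.from_request_email("alias@example.com")
if result.no_match:
    print("no alias, user, mailbox or deleted alias matched this query")
else:
    if result.alias:
        print(f"alias {result.alias.email}, {len(result.alias_audit_log)} audit log entries")
    if result.user:
        print(f"user {result.user.id} ({result.user.email})")
    print(f"{result.mailbox_count} mailbox(es) matched")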
class EmailSearchHelpers:
@staticmethod
def mailbox_list(user: User) -> list[Mailbox]:
return (
Mailbox.filter_by(user_id=user.id)
.order_by(Mailbox.id.asc())
.limit(10)
.all()
)
@staticmethod
def mailbox_count(user: User) -> int:
return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()
@staticmethod
def alias_mailboxes(alias: Alias) -> list[Mailbox]:
return (
Session.query(Mailbox)
.filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
.union(
Session.query(Mailbox)
.join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
.filter(AliasMailbox.alias_id == alias.id)
)
.order_by(Mailbox.id)
.limit(10)
.all()
)
@staticmethod
def alias_mailbox_count(alias: Alias) -> int:
return len(alias.mailboxes)
@staticmethod
def alias_list(user: User) -> list[Alias]:
return (
Alias.filter_by(user_id=user.id).order_by(Alias.id.desc()).limit(10).all()
)
@staticmethod
def alias_count(user: User) -> int:
return Alias.filter_by(user_id=user.id).count()
@staticmethod
def partner_user(user: User) -> Optional[PartnerUser]:
return PartnerUser.get_by(user_id=user.id)
class EmailSearchAdmin(BaseView):
def is_accessible(self):
return current_user.is_authenticated and current_user.is_admin
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
@expose("/", methods=["GET", "POST"])
def index(self):
search = EmailSearchResult()
email = request.args.get("query")
if email is not None and len(email) > 0:
email = email.strip()
search = EmailSearchResult.from_request_email(email)
return self.render(
"admin/email_search.html",
email=email,
data=search,
helper=EmailSearchHelpers,
)
@expose("/partner_unlink", methods=["POST"])
def delete_partner_link(self):
user_id = request.form.get("user_id")
if not user_id:
flash("Missing user_id", "error")
return redirect(url_for("admin.email_search.index"))
try:
user_id = int(user_id)
except ValueError:
flash("Missing user_id", "error")
return redirect(url_for("admin.email_search.index", query=user_id))
user = User.get(user_id)
if user is None:
flash("User not found", "error")
return redirect(url_for("admin.email_search.index", query=user_id))
external_user_id = perform_proton_account_unlink(user, skip_check=True)
if not external_user_id:
flash("User unlinked", "success")
return redirect(url_for("admin.email_search.index", query=user_id))
AdminAuditLog.create(
admin_user_id=user.id,
model=User.__class__.__name__,
model_id=user.id,
action=AuditLogActionEnum.unlink_user.value,
data={"external_user_id": external_user_id},
)
Session.commit()
return redirect(url_for("admin.email_search.index", query=user_id))
class CustomDomainWithValidationData:
def __init__(self, domain: CustomDomain):
self.domain: CustomDomain = domain
self.ownership_expected: Optional[ExpectedValidationRecords] = None
self.ownership_validation: Optional[DomainValidationResult] = None
self.mx_expected: Optional[dict[int, ExpectedValidationRecords]] = None
self.mx_validation: Optional[DomainValidationResult] = None
self.spf_expected: Optional[ExpectedValidationRecords] = None
self.spf_validation: Optional[DomainValidationResult] = None
self.dkim_expected: dict[str, ExpectedValidationRecords] = {}
self.dkim_validation: dict[str, str] = {}
class CustomDomainSearchResult:
def __init__(self):
self.no_match: bool = False
self.user: Optional[User] = None
self.domains: list[CustomDomainWithValidationData] = []
@staticmethod
def from_user(user: Optional[User]) -> CustomDomainSearchResult:
out = CustomDomainSearchResult()
if user is None:
out.no_match = True
return out
out.user = user
dns_client = get_network_dns_client()
validator = CustomDomainValidation(
dkim_domain=config.EMAIL_DOMAIN,
partner_domains=config.PARTNER_DNS_CUSTOM_DOMAINS,
partner_domains_validation_prefixes=config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES,
dns_client=dns_client,
)
for custom_domain in user.custom_domains:
validation_data = CustomDomainWithValidationData(custom_domain)
if not custom_domain.ownership_verified:
validation_data.ownership_expected = (
validator.get_ownership_verification_record(custom_domain)
)
validation_data.ownership_validation = (
validator.validate_domain_ownership(custom_domain)
)
if not custom_domain.verified:
validation_data.mx_expected = validator.get_expected_mx_records(
custom_domain
)
validation_data.mx_validation = validator.validate_mx_records(
custom_domain
)
if not custom_domain.spf_verified:
validation_data.spf_expected = validator.get_expected_spf_record(
custom_domain
)
validation_data.spf_validation = validator.validate_spf_records(
custom_domain
)
if not custom_domain.dkim_verified:
validation_data.dkim_expected = validator.get_dkim_records(
custom_domain
)
validation_data.dkim_validation = validator.validate_dkim_records(
custom_domain
)
out.domains.append(validation_data)
return out
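A sketch of how the result above is meant to be consumed, for instance from a template or a shell session; it assumes a user looked up by email and only prints the checks that were actually re-run (the expected/validation fields stay None once a record is verified):

search = CustomDomainSearchResult.from_user(User.get_by(email="owner@example.com"))
for entry in search.domains:
    print(entry.domain.domain)
    if entry.ownership_expected is not None:
        print("  ownership TXT expected:", entry.ownership_expected)
        print("  ownership validation:", entry.ownership_validation)
    if entry.mx_expected is not None:
        print("  MX expected:", entry.mx_expected)
        print("  MX validation:", entry.mx_validation)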
class CustomDomainSearchAdmin(BaseView):
def is_accessible(self):
return current_user.is_authenticated and current_user.is_admin
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
@expose("/", methods=["GET", "POST"])
def index(self):
query = request.args.get("user")
if query is None:
search = CustomDomainSearchResult()
else:
try:
user_id = int(query)
user = User.get_by(id=user_id)
except ValueError:
user = User.get_by(email=query)
if user is None:
cd = CustomDomain.get_by(domain=query)
if cd is not None:
user = cd.user
search = CustomDomainSearchResult.from_user(user)
return self.render(
"admin/custom_domain_search.html",
data=search,
query=query,
)


@ -0,0 +1,38 @@
from enum import Enum
from typing import Optional
from app.models import Alias, AliasAuditLog
class AliasAuditLogAction(Enum):
CreateAlias = "create"
ChangeAliasStatus = "change_status"
DeleteAlias = "delete"
UpdateAlias = "update"
InitiateTransferAlias = "initiate_transfer_alias"
AcceptTransferAlias = "accept_transfer_alias"
TransferredAlias = "transferred_alias"
ChangedMailboxes = "changed_mailboxes"
CreateContact = "create_contact"
UpdateContact = "update_contact"
DeleteContact = "delete_contact"
def emit_alias_audit_log(
alias: Alias,
action: AliasAuditLogAction,
message: str,
user_id: Optional[int] = None,
commit: bool = False,
):
AliasAuditLog.create(
user_id=user_id or alias.user_id,
alias_id=alias.id,
alias_email=alias.email,
action=action.value,
message=message,
commit=commit,
)
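A short usage sketch for the helper above; the alias id is hypothetical, and commit defaults to False so callers can batch the log entry with their own Session.commit():

alias = Alias.get(1234)  # hypothetical alias id
emit_alias_audit_log(
    alias=alias,
    action=AliasAuditLogAction.UpdateAlias,
    message="Alias fields updated (note,pinned)",
    commit=True,  # persist immediately instead of relying on a later commit
)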


@ -0,0 +1,62 @@
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.db import Session
from app.models import Alias, AliasMailbox, Mailbox
_MAX_MAILBOXES_PER_ALIAS = 20
class CannotSetMailboxesForAliasCause(Enum):
Forbidden = "Forbidden"
EmptyMailboxes = "Must choose at least one mailbox"
TooManyMailboxes = "Too many mailboxes"
@dataclass
class SetMailboxesForAliasResult:
performed_change: bool
reason: Optional[CannotSetMailboxesForAliasCause]
def set_mailboxes_for_alias(
user_id: int, alias: Alias, mailbox_ids: List[int]
) -> Optional[CannotSetMailboxesForAliasCause]:
if len(mailbox_ids) == 0:
return CannotSetMailboxesForAliasCause.EmptyMailboxes
if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
return CannotSetMailboxesForAliasCause.TooManyMailboxes
mailboxes = (
Session.query(Mailbox)
.filter(
Mailbox.id.in_(mailbox_ids),
Mailbox.user_id == user_id,
Mailbox.verified == True, # noqa: E712
)
.order_by(Mailbox.id.asc())
.all()
)
if len(mailboxes) != len(mailbox_ids):
return CannotSetMailboxesForAliasCause.Forbidden
# first remove all existing alias-mailboxes links
AliasMailbox.filter_by(alias_id=alias.id).delete()
Session.flush()
# then add the new mailboxes; the first one becomes the alias's primary mailbox
for i, mailbox in enumerate(mailboxes):
if i == 0:
alias.mailbox_id = mailboxes[0].id
else:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.ChangedMailboxes,
message=",".join([f"{mailbox.id} ({mailbox.email})" for mailbox in mailboxes]),
)
return None
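A sketch of the intended call pattern, assuming user and alias already exist; the helper only flushes and creates rows, so the caller is expected to commit:

err = set_mailboxes_for_alias(user_id=user.id, alias=alias, mailbox_ids=[3, 7])
if err is not None:
    # err.value carries the user-facing message, e.g. "Too many mailboxes"
    print(err.value)
else:
    Session.commit()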


@@ -58,32 +58,34 @@ def verify_prefix_suffix(
     # alias_domain must be either one of user custom domains or built-in domains
     if alias_domain not in user.available_alias_domains(alias_options=alias_options):
-        LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+        LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
         return False

     # SimpleLogin domain case:
     # 1) alias_suffix must start with "." and
     # 2) alias_domain_prefix must come from the word list
+    available_sl_domains = [
+        sl_domain.domain
+        for sl_domain in user.get_sl_domains(alias_options=alias_options)
+    ]
     if (
-        alias_domain in user.available_sl_domains(alias_options=alias_options)
+        alias_domain in available_sl_domains
         and alias_domain not in user_custom_domains
         # when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
         and not config.DISABLE_ALIAS_SUFFIX
     ):
         if not alias_domain_prefix.startswith("."):
-            LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
+            LOG.i("User %s submits a wrong alias suffix %s", user, alias_suffix)
             return False
     else:
         if alias_domain not in user_custom_domains:
             if not config.DISABLE_ALIAS_SUFFIX:
-                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

-            if alias_domain not in user.available_sl_domains(
-                alias_options=alias_options
-            ):
-                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+            if alias_domain not in available_sl_domains:
+                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

     return True


@ -1,12 +1,14 @@
import csv import csv
from io import StringIO from io import StringIO
import re import re
from dataclasses import dataclass
from typing import Optional, Tuple from typing import Optional, Tuple
from email_validator import validate_email, EmailNotValidError from email_validator import validate_email, EmailNotValidError
from sqlalchemy.exc import IntegrityError, DataError from sqlalchemy.exc import IntegrityError, DataError
from flask import make_response from flask import make_response
from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
from app.config import ( from app.config import (
BOUNCE_PREFIX_FOR_REPLY_PHASE, BOUNCE_PREFIX_FOR_REPLY_PHASE,
BOUNCE_PREFIX, BOUNCE_PREFIX,
@ -23,6 +25,7 @@ from app.email_utils import (
send_cannot_create_domain_alias, send_cannot_create_domain_alias,
send_email, send_email,
render, render,
sl_formataddr,
) )
from app.errors import AliasInTrashError from app.errors import AliasInTrashError
from app.events.event_dispatcher import EventDispatcher from app.events.event_dispatcher import EventDispatcher
@ -30,6 +33,7 @@ from app.events.generated.event_pb2 import (
AliasDeleted, AliasDeleted,
AliasStatusChanged, AliasStatusChanged,
EventContent, EventContent,
AliasCreated,
) )
from app.log import LOG from app.log import LOG
from app.models import ( from app.models import (
@ -63,12 +67,16 @@ def get_user_if_alias_would_auto_create(
# Prevent addresses with unicode characters (🤯) in them for now. # Prevent addresses with unicode characters (🤯) in them for now.
validate_email(address, check_deliverability=False, allow_smtputf8=False) validate_email(address, check_deliverability=False, allow_smtputf8=False)
except EmailNotValidError: except EmailNotValidError:
LOG.i(f"Not creating alias for {address} because email is invalid")
return None return None
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain( domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
address, notify_user=notify_user address, notify_user=notify_user
) )
if DomainDeletedAlias.get_by(email=address): if DomainDeletedAlias.get_by(email=address):
LOG.i(
f"Not creating alias for {address} because it was previously deleted for this domain"
)
return None return None
if domain_and_rule: if domain_and_rule:
return domain_and_rule[0].user return domain_and_rule[0].user
@ -93,6 +101,9 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain) custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain)
if not custom_domain: if not custom_domain:
LOG.i(
f"Cannot auto-create custom domain alias for {address} because there's no custom domain for {alias_domain}"
)
return None return None
user: User = custom_domain.user user: User = custom_domain.user
@ -108,6 +119,9 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
if not custom_domain.catch_all: if not custom_domain.catch_all:
if len(custom_domain.auto_create_rules) == 0: if len(custom_domain.auto_create_rules) == 0:
LOG.i(
f"Cannot create alias {address} for domain {custom_domain} because it has no catch-all and no rules"
)
return None return None
local = get_email_local_part(address) local = get_email_local_part(address)
@ -121,7 +135,7 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
) )
return custom_domain, rule return custom_domain, rule
else: # no rule passes else: # no rule passes
LOG.d("no rule passed to create %s", local) LOG.d(f"No rule matches auto-create {address} for domain {custom_domain}")
return None return None
LOG.d("Create alias via catchall") LOG.d("Create alias via catchall")
@ -148,6 +162,7 @@ def check_if_alias_can_be_auto_created_for_a_directory(
sep = "#" sep = "#"
else: else:
# if there's no directory separator in the alias, no way to auto-create it # if there's no directory separator in the alias, no way to auto-create it
LOG.info(f"Cannot auto-create {address} since it has no directory separator")
return None return None
directory_name = address[: address.find(sep)] directory_name = address[: address.find(sep)]
@ -155,6 +170,9 @@ def check_if_alias_can_be_auto_created_for_a_directory(
directory = Directory.get_by(name=directory_name) directory = Directory.get_by(name=directory_name)
if not directory: if not directory:
LOG.info(
f"Cannot auto-create {address} because there is no directory for {directory_name}"
)
return None return None
user: User = directory.user user: User = directory.user
@ -163,12 +181,17 @@ def check_if_alias_can_be_auto_created_for_a_directory(
return None return None
if not user.can_create_new_alias(): if not user.can_create_new_alias():
LOG.d(f"{user} can't create new directory alias {address}") LOG.d(
f"{user} can't create new directory alias {address} because user cannot create aliases"
)
if notify_user: if notify_user:
send_cannot_create_directory_alias(user, address, directory_name) send_cannot_create_directory_alias(user, address, directory_name)
return None return None
if directory.disabled: if directory.disabled:
LOG.d(
f"{user} can't create new directory alias {address} bcause directory is disabled"
)
if notify_user: if notify_user:
send_cannot_create_directory_alias_disabled(user, address, directory_name) send_cannot_create_directory_alias_disabled(user, address, directory_name)
return None return None
@ -311,7 +334,10 @@ def try_auto_create_via_domain(address: str) -> Optional[Alias]:
def delete_alias( def delete_alias(
alias: Alias, user: User, reason: AliasDeleteReason = AliasDeleteReason.Unspecified alias: Alias,
user: User,
reason: AliasDeleteReason = AliasDeleteReason.Unspecified,
commit: bool = False,
): ):
""" """
Delete an alias and add it to either global or domain trash Delete an alias and add it to either global or domain trash
@ -341,12 +367,21 @@ def delete_alias(
Session.commit() Session.commit()
LOG.i(f"Moving {alias} to global trash {deleted_alias}") LOG.i(f"Moving {alias} to global trash {deleted_alias}")
alias_id = alias.id
alias_email = alias.email
emit_alias_audit_log(
alias, AliasAuditLogAction.DeleteAlias, "Alias deleted by user action"
)
Alias.filter(Alias.id == alias.id).delete() Alias.filter(Alias.id == alias.id).delete()
Session.commit() Session.commit()
EventDispatcher.send_event( EventDispatcher.send_event(
user, EventContent(alias_deleted=AliasDeleted(alias_id=alias.id)) user,
EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
) )
if commit:
Session.commit()
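Given the new commit flag on delete_alias, a caller that wants the deletion, the audit log entry and the deletion event persisted in one go would look roughly like this (a sketch; alias and user are assumed to exist):

delete_alias(
    alias,
    user,
    reason=AliasDeleteReason.Unspecified,
    commit=True,  # persist any state written after the alias row is removed
)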
def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]: def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
@ -420,7 +455,7 @@ def alias_export_csv(user, csv_direct_export=False):
return output return output
def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]): def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
# cannot transfer alias which is used for receiving newsletter # cannot transfer alias which is used for receiving newsletter
if User.get_by(newsletter_alias_id=alias.id): if User.get_by(newsletter_alias_id=alias.id):
raise Exception("Cannot transfer alias that's used to receive newsletter") raise Exception("Cannot transfer alias that's used to receive newsletter")
@ -474,17 +509,90 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
alias.disable_pgp = False alias.disable_pgp = False
alias.pinned = False alias.pinned = False
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.TransferredAlias,
message=f"Lost ownership of alias due to alias transfer confirmed. New owner is {new_user.id}",
user_id=old_user.id,
)
EventDispatcher.send_event(
old_user,
EventContent(
alias_deleted=AliasDeleted(
id=alias.id,
email=alias.email,
)
),
)
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.AcceptTransferAlias,
message=f"Accepted alias transfer from user {old_user.id}",
user_id=new_user.id,
)
EventDispatcher.send_event(
new_user,
EventContent(
alias_created=AliasCreated(
id=alias.id,
email=alias.email,
note=alias.note,
enabled=alias.enabled,
created_at=int(alias.created_at.timestamp),
)
),
)
Session.commit() Session.commit()
def change_alias_status(alias: Alias, enabled: bool, commit: bool = False): def change_alias_status(
alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
):
LOG.i(f"Changing alias {alias} enabled to {enabled}") LOG.i(f"Changing alias {alias} enabled to {enabled}")
alias.enabled = enabled alias.enabled = enabled
event = AliasStatusChanged( event = AliasStatusChanged(
alias_id=alias.id, alias_email=alias.email, enabled=enabled id=alias.id,
email=alias.email,
enabled=enabled,
created_at=int(alias.created_at.timestamp),
) )
EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event)) EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
audit_log_message = f"Set alias status to {enabled}"
if message is not None:
audit_log_message += f". {message}"
emit_alias_audit_log(
alias, AliasAuditLogAction.ChangeAliasStatus, audit_log_message
)
if commit: if commit:
Session.commit() Session.commit()
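A sketch of how the extended signature is used elsewhere in this changeset; the message string is free-form and is simply appended to the "Set alias status to ..." audit log entry:

change_alias_status(
    alias,
    enabled=False,
    message="Set enabled=False via API",
    commit=True,
)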
@dataclass
class AliasRecipientName:
name: str
message: Optional[str] = None
def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
"""
Logic:
1. If alias has name, use it
2. If alias has custom domain, and custom domain has name, use it
3. Otherwise, use the alias email as the recipient
"""
if alias.name:
return AliasRecipientName(
name=sl_formataddr((alias.name, alias.email)),
message=f"Put alias name {alias.name} in from header",
)
elif alias.custom_domain:
if alias.custom_domain.name:
return AliasRecipientName(
name=sl_formataddr((alias.custom_domain.name, alias.email)),
message=f"Put domain default alias name {alias.custom_domain.name} in from header",
)
return AliasRecipientName(name=alias.email)
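A sketch of how the recipient-name helper is meant to be used when building the From header of a forwarded message; msg is assumed to be an email.message.Message that does not yet carry a From header:

recipient = get_alias_recipient_name(alias)
if recipient.message:
    LOG.d(recipient.message)
# recipient.name is either "Display Name <alias@example.com>" or the bare alias email
msg["From"] = recipient.name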


@@ -191,15 +191,8 @@ def get_alias_infos_with_pagination_v3(
         q = q.order_by(Alias.email.desc())
     else:
         # default sorting
-        latest_activity = case(
-            [
-                (Alias.created_at > EmailLog.created_at, Alias.created_at),
-                (Alias.created_at < EmailLog.created_at, EmailLog.created_at),
-            ],
-            else_=Alias.created_at,
-        )
         q = q.order_by(Alias.pinned.desc())
-        q = q.order_by(latest_activity.desc())
+        q = q.order_by(func.greatest(Alias.created_at, EmailLog.created_at).desc())

     q = q.limit(page_limit).offset(page_id * page_size)
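The case() expression above is replaced by func.greatest, which picks the larger of the two timestamps directly in SQL; as a standalone sketch, assuming the SQLAlchemy models from this module:

from sqlalchemy import func

latest_activity = func.greatest(Alias.created_at, EmailLog.created_at)
q = q.order_by(Alias.pinned.desc(), latest_activity.desc())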


@ -1,9 +1,13 @@
from typing import Optional
from deprecated import deprecated from deprecated import deprecated
from flask import g from flask import g
from flask import jsonify from flask import jsonify
from flask import request from flask import request
from app import alias_utils from app import alias_utils
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_mailbox_utils import set_mailboxes_for_alias
from app.api.base import api_bp, require_api_auth from app.api.base import api_bp, require_api_auth
from app.api.serializer import ( from app.api.serializer import (
AliasInfo, AliasInfo,
@ -26,7 +30,7 @@ from app.errors import (
) )
from app.extensions import limiter from app.extensions import limiter
from app.log import LOG from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasMailbox, AliasDeleteReason from app.models import Alias, Contact, Mailbox, AliasDeleteReason
@deprecated @deprecated
@ -185,7 +189,11 @@ def toggle_alias(alias_id):
if not alias or alias.user_id != user.id: if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403 return jsonify(error="Forbidden"), 403
alias_utils.change_alias_status(alias, enabled=not alias.enabled) alias_utils.change_alias_status(
alias,
enabled=not alias.enabled,
message=f"Set enabled={not alias.enabled} via API",
)
LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}") LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
Session.commit() Session.commit()
@ -272,10 +280,12 @@ def update_alias(alias_id):
if not alias or alias.user_id != user.id: if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403 return jsonify(error="Forbidden"), 403
changed_fields = []
changed = False changed = False
if "note" in data: if "note" in data:
new_note = data.get("note") new_note = data.get("note")
alias.note = new_note alias.note = new_note
changed_fields.append("note")
changed = True changed = True
if "mailbox_id" in data: if "mailbox_id" in data:
@ -285,35 +295,22 @@ def update_alias(alias_id):
return jsonify(error="Forbidden"), 400 return jsonify(error="Forbidden"), 400
alias.mailbox_id = mailbox_id alias.mailbox_id = mailbox_id
changed_fields.append(f"mailbox_id ({mailbox_id})")
changed = True changed = True
if "mailbox_ids" in data: if "mailbox_ids" in data:
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")] try:
mailboxes: [Mailbox] = [] mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
except ValueError:
# check if all mailboxes belong to user return jsonify(error="Invalid mailbox_id"), 400
for mailbox_id in mailbox_ids: err = set_mailboxes_for_alias(
mailbox = Mailbox.get(mailbox_id) user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
if not mailbox or mailbox.user_id != user.id or not mailbox.verified: )
return jsonify(error="Forbidden"), 400 if err:
mailboxes.append(mailbox) return jsonify(error=err.value), 400
if not mailboxes:
return jsonify(error="Must choose at least one mailbox"), 400
# <<< update alias mailboxes >>>
# first remove all existing alias-mailboxes links
AliasMailbox.filter_by(alias_id=alias.id).delete()
Session.flush()
# then add all new mailboxes
for i, mailbox in enumerate(mailboxes):
if i == 0:
alias.mailbox_id = mailboxes[0].id
else:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
# <<< END update alias mailboxes >>>
mailbox_ids_string = ",".join(map(str, mailbox_ids))
changed_fields.append(f"mailbox_ids ({mailbox_ids_string})")
changed = True changed = True
if "name" in data: if "name" in data:
@ -325,17 +322,26 @@ def update_alias(alias_id):
if new_name: if new_name:
new_name = new_name.replace("\n", "") new_name = new_name.replace("\n", "")
alias.name = new_name alias.name = new_name
changed_fields.append("name")
changed = True changed = True
if "disable_pgp" in data: if "disable_pgp" in data:
alias.disable_pgp = data.get("disable_pgp") alias.disable_pgp = data.get("disable_pgp")
changed_fields.append("disable_pgp")
changed = True changed = True
if "pinned" in data: if "pinned" in data:
alias.pinned = data.get("pinned") alias.pinned = data.get("pinned")
changed_fields.append("pinned")
changed = True changed = True
if changed: if changed:
changed_fields_string = ",".join(changed_fields)
emit_alias_audit_log(
alias,
AliasAuditLogAction.UpdateAlias,
f"Alias fields updated ({changed_fields_string})",
)
Session.commit() Session.commit()
return jsonify(ok=True), 200 return jsonify(ok=True), 200
@ -416,15 +422,14 @@ def create_contact_route(alias_id):
if not data: if not data:
return jsonify(error="request body cannot be empty"), 400 return jsonify(error="request body cannot be empty"), 400
alias: Alias = Alias.get(alias_id) alias: Optional[Alias] = Alias.get_by(id=alias_id, user_id=g.user.id)
if not alias:
if alias.user_id != g.user.id:
return jsonify(error="Forbidden"), 403 return jsonify(error="Forbidden"), 403
contact_address = data.get("contact") contact_address = data.get("contact")
try: try:
contact = create_contact(g.user, alias, contact_address) contact = create_contact(alias, contact_address)
except ErrContactErrorUpgradeNeeded as err: except ErrContactErrorUpgradeNeeded as err:
return jsonify(error=err.error_for_user()), 403 return jsonify(error=err.error_for_user()), 403
except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err: except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
@ -446,11 +451,16 @@ def delete_contact(contact_id):
200 200
""" """
user = g.user user = g.user
contact = Contact.get(contact_id) contact: Optional[Contact] = Contact.get(contact_id)
if not contact or contact.alias.user_id != user.id: if not contact or contact.alias.user_id != user.id:
return jsonify(error="Forbidden"), 403 return jsonify(error="Forbidden"), 403
emit_alias_audit_log(
alias=contact.alias,
action=AliasAuditLogAction.DeleteContact,
message=f"Deleted contact {contact_id} ({contact.email})",
)
Contact.delete(contact_id) Contact.delete(contact_id)
Session.commit() Session.commit()
@ -468,12 +478,17 @@ def toggle_contact(contact_id):
200 200
""" """
user = g.user user = g.user
contact = Contact.get(contact_id) contact: Optional[Contact] = Contact.get(contact_id)
if not contact or contact.alias.user_id != user.id: if not contact or contact.alias.user_id != user.id:
return jsonify(error="Forbidden"), 403 return jsonify(error="Forbidden"), 403
contact.block_forward = not contact.block_forward contact.block_forward = not contact.block_forward
emit_alias_audit_log(
alias=contact.alias,
action=AliasAuditLogAction.UpdateContact,
message=f"Set contact state {contact.id} {contact.email} -> {contact.website_email} to blocked {contact.block_forward}",
)
Session.commit() Session.commit()
return jsonify(block_forward=contact.block_forward), 200 return jsonify(block_forward=contact.block_forward), 200


@ -1,7 +1,6 @@
import secrets import secrets
import string import string
import facebook
import google.oauth2.credentials import google.oauth2.credentials
import googleapiclient.discovery import googleapiclient.discovery
from flask import jsonify, request from flask import jsonify, request
@ -23,6 +22,7 @@ from app.events.auth_event import LoginEvent, RegisterEvent
from app.extensions import limiter from app.extensions import limiter
from app.log import LOG from app.log import LOG
from app.models import User, ApiKey, SocialAuth, AccountActivation from app.models import User, ApiKey, SocialAuth, AccountActivation
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email, canonicalize_email from app.utils import sanitize_email, canonicalize_email
@ -52,8 +52,12 @@ def auth_login():
password = data.get("password") password = data.get("password")
device = data.get("device") device = data.get("device")
email = sanitize_email(data.get("email")) email = data.get("email")
canonical_email = canonicalize_email(data.get("email")) if not email:
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
return jsonify(error="Email or password incorrect"), 400
email = sanitize_email(email)
canonical_email = canonicalize_email(email)
user = User.get_by(email=email) or User.get_by(email=canonical_email) user = User.get_by(email=email) or User.get_by(email=canonical_email)
@ -183,6 +187,11 @@ def auth_activate():
LOG.d("activate user %s", user) LOG.d("activate user %s", user)
user.activated = True user.activated = True
emit_user_audit_log(
user=user,
action=UserAuditLogAction.ActivateUser,
message=f"User has been activated: {user.email}",
)
AccountActivation.delete(account_activation.id) AccountActivation.delete(account_activation.id)
Session.commit() Session.commit()
@ -251,6 +260,8 @@ def auth_facebook():
} }
""" """
import facebook
data = request.get_json() data = request.get_json()
if not data: if not data:
return jsonify(error="request body cannot be empty"), 400 return jsonify(error="request body cannot be empty"), 400


@@ -2,8 +2,10 @@ from flask import g, request
 from flask import jsonify

 from app.api.base import api_bp, require_api_auth
+from app.custom_domain_utils import set_custom_domain_mailboxes
 from app.db import Session
-from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
+from app.log import LOG
+from app.models import CustomDomain, DomainDeletedAlias


 def custom_domain_to_dict(custom_domain: CustomDomain):
@ -100,23 +102,14 @@ def update_custom_domain(custom_domain_id):
if "mailbox_ids" in data: if "mailbox_ids" in data:
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")] mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
if mailbox_ids: result = set_custom_domain_mailboxes(user.id, custom_domain, mailbox_ids)
# check if mailbox is not tempered with if result.success:
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
return jsonify(error="Forbidden"), 400
mailboxes.append(mailbox)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
changed = True changed = True
else:
LOG.info(
f"Prevented from updating mailboxes [custom_domain_id={custom_domain.id}]: {result.reason.value}"
)
return jsonify(error="Forbidden"), 400
if changed: if changed:
Session.commit() Session.commit()


@ -1,22 +1,13 @@
from smtplib import SMTPRecipientsRefused from smtplib import SMTPRecipientsRefused
import arrow
from flask import g from flask import g
from flask import jsonify from flask import jsonify
from flask import request from flask import request
from app import mailbox_utils
from app.api.base import api_bp, require_api_auth from app.api.base import api_bp, require_api_auth
from app.config import JOB_DELETE_MAILBOX
from app.dashboard.views.mailbox import send_verification_email
from app.dashboard.views.mailbox_detail import verify_mailbox_change
from app.db import Session from app.db import Session
from app.email_utils import ( from app.models import Mailbox
mailbox_already_used,
email_can_be_used_as_mailbox,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email from app.utils import sanitize_email
@ -42,33 +33,21 @@ def create_mailbox():
the new mailbox dict the new mailbox dict
""" """
user = g.user user = g.user
mailbox_email = sanitize_email(request.get_json().get("email")) email = request.get_json().get("email")
if not email:
return jsonify(error="Invalid email"), 400
if not user.is_premium(): mailbox_email = sanitize_email(email)
return jsonify(error="Only premium plan can add additional mailbox"), 400
if not is_valid_email(mailbox_email): try:
return jsonify(error=f"{mailbox_email} invalid"), 400 new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
elif mailbox_already_used(mailbox_email, user): except mailbox_utils.MailboxError as e:
return jsonify(error=f"{mailbox_email} already used"), 400 return jsonify(error=e.msg), 400
elif not email_can_be_used_as_mailbox(mailbox_email):
return (
jsonify(
error=f"{mailbox_email} cannot be used. Please note a mailbox cannot "
f"be a disposable email address"
),
400,
)
else:
new_mailbox = Mailbox.create(email=mailbox_email, user_id=user.id)
Session.commit()
send_verification_email(user, new_mailbox) return (
jsonify(mailbox_to_dict(new_mailbox)),
return ( 201,
jsonify(mailbox_to_dict(new_mailbox)), )
201,
)
@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["DELETE"]) @api_bp.route("/mailboxes/<int:mailbox_id>", methods=["DELETE"])
@ -86,47 +65,17 @@ def delete_mailbox(mailbox_id):
""" """
user = g.user user = g.user
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
return jsonify(error="Forbidden"), 403
if mailbox.id == user.default_mailbox_id:
return jsonify(error="You cannot delete the default mailbox"), 400
data = request.get_json() or {} data = request.get_json() or {}
transfer_mailbox_id = data.get("transfer_aliases_to") transfer_mailbox_id = data.get("transfer_aliases_to")
if transfer_mailbox_id and int(transfer_mailbox_id) >= 0: if transfer_mailbox_id and int(transfer_mailbox_id) >= 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id) transfer_mailbox_id = int(transfer_mailbox_id)
else:
transfer_mailbox_id = None
if not transfer_mailbox or transfer_mailbox.user_id != user.id: try:
return ( mailbox_utils.delete_mailbox(user, mailbox_id, transfer_mailbox_id)
jsonify(error="You must transfer the aliases to a mailbox you own."), except mailbox_utils.MailboxError as e:
403, return jsonify(error=e.msg), 400
)
if transfer_mailbox_id == mailbox_id:
return (
jsonify(
error="You can not transfer the aliases to the mailbox you want to delete."
),
400,
)
if not transfer_mailbox.verified:
return jsonify(error="Your new mailbox is not verified"), 400
# Schedule delete account job
LOG.w("schedule delete mailbox job for %s", mailbox)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id,
},
run_at=arrow.now(),
commit=True,
)
return jsonify(deleted=True), 200 return jsonify(deleted=True), 200
@ -168,20 +117,10 @@ def update_mailbox(mailbox_id):
if "email" in data: if "email" in data:
new_email = sanitize_email(data.get("email")) new_email = sanitize_email(data.get("email"))
if mailbox_already_used(new_email, user):
return jsonify(error=f"{new_email} already used"), 400
elif not email_can_be_used_as_mailbox(new_email):
return (
jsonify(
error=f"{new_email} cannot be used. Please note a mailbox cannot "
f"be a disposable email address"
),
400,
)
try: try:
verify_mailbox_change(user, mailbox, new_email) mailbox_utils.request_mailbox_email_change(user, mailbox, new_email)
except mailbox_utils.MailboxError as e:
return jsonify(error=e.msg), 400
except SMTPRecipientsRefused: except SMTPRecipientsRefused:
return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400 return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
else: else:
@ -191,7 +130,7 @@ def update_mailbox(mailbox_id):
if "cancel_email_change" in data: if "cancel_email_change" in data:
cancel_email_change = data.get("cancel_email_change") cancel_email_change = data.get("cancel_email_change")
if cancel_email_change: if cancel_email_change:
mailbox.new_email = None mailbox_utils.cancel_email_change(mailbox.id, user)
changed = True changed = True
if changed: if changed:


@ -1,3 +1,4 @@
from email_validator import EmailNotValidError
from flask import g from flask import g
from flask import jsonify, request from flask import jsonify, request
@ -61,8 +62,17 @@ def new_custom_alias_v2():
if not data: if not data:
return jsonify(error="request body cannot be empty"), 400 return jsonify(error="request body cannot be empty"), 400
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "") alias_prefix = data.get("alias_prefix", "")
signed_suffix = data.get("signed_suffix", "").strip() if not isinstance(alias_prefix, str) or not alias_prefix:
return jsonify(error="invalid value for alias_prefix"), 400
alias_prefix = alias_prefix.strip().lower().replace(" ", "")
signed_suffix = data.get("signed_suffix", "")
if not isinstance(signed_suffix, str) or not signed_suffix:
return jsonify(error="invalid value for signed_suffix"), 400
signed_suffix = signed_suffix.strip()
note = data.get("note") note = data.get("note")
alias_prefix = convert_to_id(alias_prefix) alias_prefix = convert_to_id(alias_prefix)
@ -93,12 +103,15 @@ def new_custom_alias_v2():
400, 400,
) )
alias = Alias.create( try:
user_id=user.id, alias = Alias.create(
email=full_alias, user_id=user.id,
mailbox_id=user.default_mailbox_id, email=full_alias,
note=note, mailbox_id=user.default_mailbox_id,
) note=note,
)
except EmailNotValidError:
return jsonify(error="Email is not valid"), 400
Session.commit() Session.commit()
@ -153,8 +166,17 @@ def new_custom_alias_v3():
if not isinstance(data, dict): if not isinstance(data, dict):
return jsonify(error="request body does not follow the required format"), 400 return jsonify(error="request body does not follow the required format"), 400
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "") alias_prefix_data = data.get("alias_prefix", "") or ""
if not isinstance(alias_prefix_data, str):
return jsonify(error="request body does not follow the required format"), 400
alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
signed_suffix = data.get("signed_suffix", "") or "" signed_suffix = data.get("signed_suffix", "") or ""
if not isinstance(signed_suffix, str):
return jsonify(error="request body does not follow the required format"), 400
signed_suffix = signed_suffix.strip() signed_suffix = signed_suffix.strip()
mailbox_ids = data.get("mailbox_ids") mailbox_ids = data.get("mailbox_ids")


@@ -12,7 +12,7 @@ from app.models import (
     SenderFormatEnum,
     AliasSuffixEnum,
 )
-from app.proton.utils import perform_proton_account_unlink
+from app.proton.proton_unlink import perform_proton_account_unlink


 def setting_to_dict(user: User):
@@ -144,5 +144,6 @@ def get_available_domains_for_random_alias_v2():
 @require_api_auth
 def unlink_proton_account():
     user = g.user
-    perform_proton_account_unlink(user)
+    if not perform_proton_account_unlink(user):
+        return jsonify(error="The account cannot be unlinked"), 400

     return jsonify({"ok": True})


@@ -2,10 +2,11 @@ from flask import jsonify, g
 from sqlalchemy_utils.types.arrow import arrow

 from app.api.base import api_bp, require_api_sudo, require_api_auth
-from app import config
+from app.constants import JobType
 from app.extensions import limiter
 from app.log import LOG
 from app.models import Job, ApiToCookieToken
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 @api_bp.route("/user", methods=["DELETE"])
@@ -16,9 +17,14 @@ def delete_user():
     """
     # Schedule delete account job
+    emit_user_audit_log(
+        user=g.user,
+        action=UserAuditLogAction.UserMarkedForDeletion,
+        message=f"Marked user {g.user.id} ({g.user.email}) for deletion from API",
+    )
     LOG.w("schedule delete account job for %s", g.user)
     Job.create(
-        name=config.JOB_DELETE_ACCOUNT,
+        name=JobType.DELETE_ACCOUNT.value,
         payload={"user_id": g.user.id},
         run_at=arrow.now(),
         commit=True,
@@ -38,6 +44,8 @@ def get_api_session_token():
       token: "asdli3ldq39h9hd3",
     }
     """
+    if not g.api_key:
+        return jsonify(ok=False), 401
     token = ApiToCookieToken.create(
         user=g.user,
         api_key_id=g.api_key.id,


@@ -10,8 +10,9 @@ from app.api.base import api_bp, require_api_auth
 from app.config import SESSION_COOKIE_NAME
 from app.dashboard.views.index import get_stats
 from app.db import Session
+from app.image_validation import detect_image_format, ImageFormat
 from app.models import ApiKey, File, PartnerUser, User
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from app.session import logout_session
 from app.utils import random_string
@ -78,17 +79,18 @@ def update_user_info():
data = request.get_json() or {} data = request.get_json() or {}
if "profile_picture" in data: if "profile_picture" in data:
if data["profile_picture"] is None: if user.profile_picture_id:
if user.profile_picture_id: file = user.profile_picture
file = user.profile_picture user.profile_picture_id = None
user.profile_picture_id = None Session.flush()
if file:
File.delete(file.id)
s3.delete(file.path)
Session.flush() Session.flush()
if file: if data["profile_picture"] is not None:
File.delete(file.id)
s3.delete(file.path)
Session.flush()
else:
raw_data = base64.decodebytes(data["profile_picture"].encode()) raw_data = base64.decodebytes(data["profile_picture"].encode())
if detect_image_format(raw_data) == ImageFormat.Unknown:
return jsonify(error="Unsupported image format"), 400
file_path = random_string(30) file_path = random_string(30)
file = File.create(user_id=user.id, path=file_path) file = File.create(user_id=user.id, path=file_path)
Session.flush() Session.flush()


@@ -7,6 +7,7 @@ from app.db import Session
 from app.extensions import limiter
 from app.log import LOG
 from app.models import ActivationCode
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_next_url
@@ -47,6 +48,11 @@ def activate():
     user = activation_code.user
     user.activated = True
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.ActivateUser,
+        message=f"User has been activated: {user.email}",
+    )
     login_user(user)

     # activation code is to be used only once


@@ -10,6 +10,7 @@ from app.events.auth_event import LoginEvent
 from app.extensions import limiter
 from app.log import LOG
 from app.models import User
+from app.pw_models import PasswordOracle
 from app.utils import sanitize_email, sanitize_next_url, canonicalize_email
@@ -43,6 +44,13 @@ def login():
     user = User.get_by(email=email) or User.get_by(email=canonical_email)

     if not user or not user.check_password(form.password.data):
+        if not user:
+            # Do the hash to avoid timing attacks nevertheless
+            dummy_pw = PasswordOracle()
+            dummy_pw.password = (
+                "$2b$12$ZWqpL73h4rGNfLkJohAFAu0isqSw/bX9p/tzpbWRz/To5FAftaW8u"
+            )
+            dummy_pw.check_password(form.password.data)
         # Trigger rate limiter
         g.deduct_limit = True
         form.password.data = None


@@ -23,7 +23,7 @@ from app.proton.proton_callback_handler import (
     ProtonCallbackHandler,
     Action,
 )
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from app.utils import sanitize_next_url, sanitize_scheme

 _authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"


@@ -115,7 +115,8 @@ def register():
 def send_activation_email(user, next_url):
-    # the activation code is valid for 1h
+    # the activation code is valid for 1h and delete all previous codes
+    Session.query(ActivationCode).filter(ActivationCode.user_id == user.id).delete()
     activation = ActivationCode.create(user_id=user.id, code=random_string(30))
     Session.commit()


@@ -9,6 +9,7 @@ from app.auth.views.login_utils import after_login
 from app.db import Session
 from app.extensions import limiter
 from app.models import ResetPasswordCode
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 class ResetPasswordForm(FlaskForm):
@@ -59,6 +60,11 @@ def reset_password():
     # this can be served to activate user too
     user.activated = True
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.ResetPassword,
+        message="User has reset their password",
+    )

     # remove all reset password codes
     ResetPasswordCode.filter_by(user_id=user.id).delete()


@@ -1,2 +1,3 @@
 SHA1 = "dev"
 BUILD_TIME = "1652365083"
+VERSION = SHA1


@@ -3,7 +3,7 @@ import random
 import socket
 import string
 from ast import literal_eval
-from typing import Callable, List
+from typing import Callable, List, Optional
 from urllib.parse import urlparse

 from dotenv import load_dotenv
@@ -35,6 +35,44 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
     return literal_eval(value)
def get_env_dict(env_var: str) -> dict[str, str]:
"""
Get an env variable and convert it into a python dictionary with keys and values as strings.
Args:
env_var (str): env var, example: SL_DB
Syntax is: key1=value1;key2=value2
Components separated by ;
key and value separated by =
"""
value = os.getenv(env_var)
if not value:
return {}
components = value.split(";")
result = {}
for component in components:
if component == "":
continue
parts = component.split("=")
if len(parts) != 2:
raise Exception(f"Invalid config for env var {env_var}")
result[parts[0].strip()] = parts[1].strip()
return result
def get_env_csv(env_var: str, default: Optional[str]) -> list[str]:
"""
Get an env variable and split it on commas into a list of strings.
Syntax is: val1,val2
"""
value = os.getenv(env_var, default)
if not value:
return []
return [field.strip() for field in value.split(",") if field.strip()]
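A sketch of the two parsers with hypothetical environment values, following the syntax documented in their docstrings:

# export PARTNER_DNS_CUSTOM_DOMAINS="1=partner1.example;2=partner2.example"
# export PROTON_MX_SERVERS="mail.protonmail.ch., mailsec.protonmail.ch."
partner_domains = get_env_dict("PARTNER_DNS_CUSTOM_DOMAINS")
# -> {"1": "partner1.example", "2": "partner2.example"}
mx_servers = get_env_csv("PROTON_MX_SERVERS", None)
# -> ["mail.protonmail.ch.", "mailsec.protonmail.ch."]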
 config_file = os.environ.get("CONFIG")
 if config_file:
     config_file = get_abs_path(config_file)
@@ -144,6 +182,14 @@ FIRST_ALIAS_DOMAIN = os.environ.get("FIRST_ALIAS_DOMAIN") or EMAIL_DOMAIN
 # e.g. [(10, "mx1.hostname."), (10, "mx2.hostname.")]
 EMAIL_SERVERS_WITH_PRIORITY = sl_getenv("EMAIL_SERVERS_WITH_PRIORITY")

+PROTON_MX_SERVERS = get_env_csv(
+    "PROTON_MX_SERVERS", "mail.protonmail.ch., mailsec.protonmail.ch."
+)
+PROTON_EMAIL_DOMAINS = get_env_csv(
+    "PROTON_EMAIL_DOMAINS", "proton.me, protonmail.com, protonmail.ch, proton.ch, pm.me"
+)
+
 # disable the alias suffix, i.e. the ".random_word" part
 DISABLE_ALIAS_SUFFIX = "DISABLE_ALIAS_SUFFIX" in os.environ
@@ -270,19 +316,6 @@ MFA_USER_ID = "mfa_user_id"
 FLASK_PROFILER_PATH = os.environ.get("FLASK_PROFILER_PATH")
 FLASK_PROFILER_PASSWORD = os.environ.get("FLASK_PROFILER_PASSWORD")

-# Job names
-JOB_ONBOARDING_1 = "onboarding-1"
-JOB_ONBOARDING_2 = "onboarding-2"
-JOB_ONBOARDING_3 = "onboarding-3"
-JOB_ONBOARDING_4 = "onboarding-4"
-JOB_BATCH_IMPORT = "batch-import"
-JOB_DELETE_ACCOUNT = "delete-account"
-JOB_DELETE_MAILBOX = "delete-mailbox"
-JOB_DELETE_DOMAIN = "delete-domain"
-JOB_SEND_USER_REPORT = "send-user-report"
-JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
-JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"

 # for pagination
 PAGE_LIMIT = 20
@@ -574,7 +607,6 @@ SKIP_MX_LOOKUP_ON_CHECK = False
 DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ

-SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)

 MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))

 UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
@@ -588,3 +620,53 @@ EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
 # We want it disabled by default, so only skip if defined
 EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
 EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ
def read_webhook_enabled_user_ids() -> Optional[List[int]]:
user_ids = os.environ.get("EVENT_WEBHOOK_ENABLED_USER_IDS", None)
if user_ids is None:
return None
ids = []
for user_id in user_ids.split(","):
try:
ids.append(int(user_id.strip()))
except ValueError:
pass
return ids
EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_ids()
# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
# It defaults to the regular DB_URI in case it's needed
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)
def read_partner_dict(var: str) -> dict[int, str]:
partner_value = get_env_dict(var)
if len(partner_value) == 0:
return {}
res: dict[int, str] = {}
for partner_id in partner_value.keys():
try:
partner_id_int = int(partner_id.strip())
res[partner_id_int] = partner_value[partner_id]
except ValueError:
pass
return res
PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
"PARTNER_DNS_CUSTOM_DOMAINS"
)
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
"PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
)
MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
"MAILBOX_VERIFICATION_OVERRIDE_CODE", None
)
AUDIT_LOG_MAX_DAYS = int(os.environ.get("AUDIT_LOG_MAX_DAYS", 30))


@ -1 +1,18 @@
import enum
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
class JobType(enum.Enum):
ONBOARDING_1 = "onboarding-1"
ONBOARDING_2 = "onboarding-2"
ONBOARDING_4 = "onboarding-4"
BATCH_IMPORT = "batch-import"
DELETE_ACCOUNT = "delete-account"
DELETE_MAILBOX = "delete-mailbox"
DELETE_DOMAIN = "delete-domain"
SEND_USER_REPORT = "send-user-report"
SEND_PROTON_WELCOME_1 = "proton-welcome-1"
SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"
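Call sites switch from the removed config constants to the enum's value, as in the delete-account endpoint above; a sketch for scheduling the delete-mailbox job, where mailbox is assumed to be an existing Mailbox and the payload keys are taken from the old inline implementation shown earlier:

import arrow

from app.constants import JobType
from app.models import Job

Job.create(
    name=JobType.DELETE_MAILBOX.value,
    payload={"mailbox_id": mailbox.id, "transfer_mailbox_id": None},
    run_at=arrow.now(),
    commit=True,
)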

app/app/contact_utils.py Normal file

@ -0,0 +1,138 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from sqlalchemy.exc import IntegrityError
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.db import Session
from app.email_utils import generate_reply_email, parse_full_address
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Contact, Alias
from app.utils import sanitize_email
class ContactCreateError(Enum):
InvalidEmail = "Invalid email"
NotAllowed = "Your plan does not allow to create contacts"
Unknown = "Unknown error when trying to create contact"
@dataclass
class ContactCreateResult:
contact: Optional[Contact]
created: bool
error: Optional[ContactCreateError]
def __update_contact_if_needed(
contact: Contact, name: Optional[str], mail_from: Optional[str]
) -> ContactCreateResult:
if name and contact.name != name:
LOG.d(f"Setting {contact} name to {name}")
contact.name = name
Session.commit()
if mail_from and contact.mail_from is None:
LOG.d(f"Setting {contact} mail_from to {mail_from}")
contact.mail_from = mail_from
Session.commit()
return ContactCreateResult(contact, created=False, error=None)
def create_contact(
email: str,
alias: Alias,
name: Optional[str] = None,
mail_from: Optional[str] = None,
allow_empty_email: bool = False,
automatic_created: bool = False,
from_partner: bool = False,
) -> ContactCreateResult:
# Contacts are still created automatically when an email is received for an alias, even if the user cannot create them manually
if not automatic_created and not alias.user.can_create_contacts():
return ContactCreateResult(
None, created=False, error=ContactCreateError.NotAllowed
)
# Parse emails with form 'name <email>'
try:
email_name, email = parse_full_address(email)
except ValueError:
email = ""
email_name = ""
# If no name is explicitly given try to get it from the parsed email
if name is None:
name = email_name[: Contact.MAX_NAME_LENGTH]
else:
name = name[: Contact.MAX_NAME_LENGTH]
# If still no name is there, make sure the name is None instead of empty string
if not name:
name = None
if name is not None and "\x00" in name:
LOG.w("Cannot use contact name because has \\x00")
name = ""
# Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
email = sanitize_email(email, not_lower=True)
if not is_valid_email(email):
LOG.w(f"invalid contact email {email}")
if not allow_empty_email:
return ContactCreateResult(
None, created=False, error=ContactCreateError.InvalidEmail
)
LOG.d("Create a contact with invalid email for %s", alias)
# either reuse a contact with empty email or create a new contact with empty email
email = ""
# If contact exists, update name and mail_from if needed
contact = Contact.get_by(alias_id=alias.id, website_email=email)
if contact is not None:
return __update_contact_if_needed(contact, name, mail_from)
# Create the contact
reply_email = generate_reply_email(email, alias)
alias_id = alias.id
try:
flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
is_invalid_email = email == ""
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=email,
name=name,
reply_email=reply_email,
mail_from=mail_from,
automatic_created=automatic_created,
flags=flags,
invalid_email=is_invalid_email,
commit=True,
)
contact_id = contact.id
if automatic_created:
trail = ". Automatically created"
else:
trail = ". Created by user action"
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.CreateContact,
message=f"Created contact {contact_id} ({email}){trail}",
commit=True,
)
LOG.d(
f"Created contact {contact} for alias {alias} with email {email} invalid_email={is_invalid_email}"
)
return ContactCreateResult(contact, created=True, error=None)
except IntegrityError:
Session.rollback()
LOG.info(
f"Contact with email {email} for alias_id {alias_id} already existed, fetching from DB"
)
contact: Optional[Contact] = Contact.get_by(
alias_id=alias_id, website_email=email
)
if contact:
return __update_contact_if_needed(contact, name, mail_from)
else:
LOG.warning(
f"Could not find contact with email {email} for alias_id {alias_id} and it should exist"
)
return ContactCreateResult(
None, created=False, error=ContactCreateError.Unknown
)
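Editor's note: a sketch of how a caller is expected to consume ContactCreateResult, matching the error mapping the dashboard view adopts later in this diff (the wrapper function name is illustrative):

from app.contact_utils import ContactCreateError, create_contact
from app.models import Alias

def describe_contact_creation(alias: Alias, address: str) -> str:
    result = create_contact(email=address, alias=alias)
    if result.error == ContactCreateError.InvalidEmail:
        return "invalid email"
    if result.error == ContactCreateError.NotAllowed:
        return "plan does not allow creating contacts"
    if result.error is not None:
        return "unknown error"
    if not result.created:
        return f"contact {result.contact.id} already existed"
    return f"contact {result.contact.id} created"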

app/app/coupon_utils.py Normal file

@ -0,0 +1,149 @@
from typing import Optional
import arrow
from sqlalchemy import or_, update, and_
from app.config import ADMIN_EMAIL
from app.db import Session
from app.email_utils import send_email
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import (
User,
ManualSubscription,
Coupon,
LifetimeCoupon,
PartnerSubscription,
PartnerUser,
)
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
class CouponUserCannotRedeemError(Exception):
pass
def redeem_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
if user.lifetime:
LOG.i(f"User {user} is a lifetime SL user. Cannot redeem coupons")
raise CouponUserCannotRedeemError()
sub = user.get_active_subscription()
if sub and not isinstance(sub, ManualSubscription):
LOG.i(
f"User {user} has an active subscription that is not manual. Cannot redeem coupon {coupon_code}"
)
raise CouponUserCannotRedeemError()
coupon = Coupon.get_by(code=coupon_code)
if not coupon:
LOG.i(f"User is trying to redeem coupon {coupon_code} that does not exist")
return None
now = arrow.utcnow()
stmt = (
update(Coupon)
.where(
and_(
Coupon.code == coupon_code,
Coupon.used == False, # noqa: E712
or_(
Coupon.expires_date == None, # noqa: E711
Coupon.expires_date > now,
),
)
)
.values(used=True, used_by_user_id=user.id, updated_at=now)
)
res = Session.execute(stmt)
if res.rowcount == 0:
LOG.i(f"Coupon {coupon.id} could not be redeemed. It's expired or invalid.")
return None
LOG.i(
f"Redeemed normal coupon {coupon.id} for {coupon.nb_year} years by user {user}"
)
if sub:
# renew existing subscription
if sub.end_at > arrow.now():
sub.end_at = sub.end_at.shift(years=coupon.nb_year)
else:
sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
else:
# There may be an expired manual subscription
sub = ManualSubscription.get_by(user_id=user.id)
end_at = arrow.now().shift(years=coupon.nb_year, days=1)
if sub:
sub.end_at = end_at
else:
sub = ManualSubscription.create(
user_id=user.id,
end_at=end_at,
comment="using coupon code",
is_giveaway=coupon.is_giveaway,
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.Upgrade,
message=f"User {user} redeemed coupon {coupon.id} for {coupon.nb_year} years",
)
EventDispatcher.send_event(
user=user,
content=EventContent(
user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
),
)
Session.commit()
return coupon
def redeem_lifetime_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
if user.lifetime:
return None
partner_sub = (
Session.query(PartnerSubscription)
.join(PartnerUser, PartnerUser.id == PartnerSubscription.partner_user_id)
.filter(PartnerUser.user_id == user.id, PartnerSubscription.lifetime == True) # noqa: E712
.first()
)
if partner_sub is not None:
return None
coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=coupon_code)
if not coupon:
return None
stmt = (
update(LifetimeCoupon)
.where(
and_(
LifetimeCoupon.code == coupon_code,
LifetimeCoupon.nb_used > 0,
)
)
.values(nb_used=LifetimeCoupon.nb_used - 1)
)
res = Session.execute(stmt)
if res.rowcount == 0:
LOG.i("Coupon could not be redeemed")
return None
user.lifetime = True
user.lifetime_coupon_id = coupon.id
if coupon.paid:
user.paid_lifetime = True
EventDispatcher.send_event(
user=user,
content=EventContent(user_plan_change=UserPlanChanged(lifetime=True)),
)
Session.commit()
# notify admin
send_email(
ADMIN_EMAIL,
subject=f"User {user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
plaintext="",
html="",
)
return coupon
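Editor's note: the key detail in redeem_coupon above is that the code is claimed with a single conditional UPDATE (used == False and not expired) and redemption only proceeds when rowcount is 1, so two concurrent requests cannot redeem the same coupon. A standalone sketch of that claim pattern with SQLAlchemy Core; the table and column names are illustrative, not the real models:

from datetime import datetime, timezone
from sqlalchemy import Boolean, Column, DateTime, Integer, MetaData, String, Table, and_, or_, update

metadata = MetaData()
coupons = Table(
    "coupons", metadata,
    Column("id", Integer, primary_key=True),
    Column("code", String, unique=True),
    Column("used", Boolean, default=False),
    Column("used_by_user_id", Integer, nullable=True),
    Column("expires_date", DateTime, nullable=True),
)

def claim_coupon(session, code: str, user_id: int) -> bool:
    now = datetime.now(timezone.utc)
    stmt = (
        update(coupons)
        .where(
            and_(
                coupons.c.code == code,
                coupons.c.used.is_(False),
                or_(coupons.c.expires_date.is_(None), coupons.c.expires_date > now),
            )
        )
        .values(used=True, used_by_user_id=user_id)
    )
    # rowcount == 0 means the code was already used, expired, or does not exist.
    return session.execute(stmt).rowcount == 1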


@ -0,0 +1,206 @@
import arrow
import re
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional
from app.constants import JobType
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
_MAX_MAILBOXES_PER_DOMAIN = 20
@dataclass
class CreateCustomDomainResult:
message: str = ""
message_category: str = ""
success: bool = False
instance: Optional[CustomDomain] = None
redirect: Optional[str] = None
class CannotUseDomainReason(Enum):
InvalidDomain = 1
BuiltinDomain = 2
DomainAlreadyUsed = 3
DomainPartOfUserEmail = 4
DomainUserInMailbox = 5
def message(self, domain: str) -> str:
if self == CannotUseDomainReason.InvalidDomain:
return "This is not a valid domain"
elif self == CannotUseDomainReason.BuiltinDomain:
return "A custom domain cannot be a built-in domain."
elif self == CannotUseDomainReason.DomainAlreadyUsed:
return f"{domain} already used"
elif self == CannotUseDomainReason.DomainPartOfUserEmail:
return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
elif self == CannotUseDomainReason.DomainUserInMailbox:
return f"{domain} already used in a SimpleLogin mailbox"
else:
raise Exception("Invalid CannotUseDomainReason")
class CannotSetCustomDomainMailboxesCause(Enum):
InvalidMailbox = "Something went wrong, please retry"
NoMailboxes = "You must select at least 1 mailbox"
TooManyMailboxes = (
f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
)
@dataclass
class SetCustomDomainMailboxesResult:
success: bool
reason: Optional[CannotSetCustomDomainMailboxesCause] = None
def is_valid_domain(domain: str) -> bool:
"""
Checks that a domain is valid according to RFC 1035
"""
if len(domain) > 255:
return False
if domain.endswith("."):
domain = domain[:-1] # Strip the trailing dot
labels = domain.split(".")
if not labels:
return False
for label in labels:
if not _ALLOWED_DOMAIN_REGEX.match(label):
return False
return True
def sanitize_domain(domain: str) -> str:
new_domain = domain.lower().strip()
if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]
if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]
return new_domain
def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
if not is_valid_domain(domain):
return CannotUseDomainReason.InvalidDomain
elif SLDomain.get_by(domain=domain):
return CannotUseDomainReason.BuiltinDomain
elif CustomDomain.get_by(domain=domain):
return CannotUseDomainReason.DomainAlreadyUsed
elif get_email_domain_part(user.email) == domain:
return CannotUseDomainReason.DomainPartOfUserEmail
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
).first():
return CannotUseDomainReason.DomainUserInMailbox
else:
return None
def create_custom_domain(
user: User, domain: str, partner_id: Optional[int] = None
) -> CreateCustomDomainResult:
if not user.is_premium():
return CreateCustomDomainResult(
message="Only premium plan can add custom domain",
message_category="warning",
)
new_domain = sanitize_domain(domain)
domain_forbidden_cause = can_domain_be_used(user, new_domain)
if domain_forbidden_cause:
return CreateCustomDomainResult(
message=domain_forbidden_cause.message(new_domain), message_category="error"
)
new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)
# new domain has ownership verified if its parent has the ownership verified
for root_cd in user.custom_domains:
if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True
# Add the partner_id in case it's passed
if partner_id is not None:
new_custom_domain.partner_id = partner_id
emit_user_audit_log(
user=user,
action=UserAuditLogAction.CreateCustomDomain,
message=f"Created custom domain {new_custom_domain.id} ({new_domain})",
)
Session.commit()
return CreateCustomDomainResult(
success=True,
instance=new_custom_domain,
)
def delete_custom_domain(domain: CustomDomain):
# Schedule delete domain job
LOG.w("schedule delete domain job for %s", domain)
domain.pending_deletion = True
Job.create(
name=JobType.DELETE_DOMAIN.value,
payload={"custom_domain_id": domain.id},
run_at=arrow.now(),
commit=True,
)
def set_custom_domain_mailboxes(
user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
) -> SetCustomDomainMailboxesResult:
if len(mailbox_ids) == 0:
return SetCustomDomainMailboxesResult(
success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
)
elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
return SetCustomDomainMailboxesResult(
success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
)
mailboxes = (
Session.query(Mailbox)
.filter(
Mailbox.id.in_(mailbox_ids),
Mailbox.user_id == user_id,
Mailbox.verified == True, # noqa: E712
)
.all()
)
if len(mailboxes) != len(mailbox_ids):
return SetCustomDomainMailboxesResult(
success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
mailboxes_as_str = ",".join(map(str, mailbox_ids))
emit_user_audit_log(
user=custom_domain.user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Updated custom domain {custom_domain.id} mailboxes (domain={custom_domain.domain}) (mailboxes={mailboxes_as_str})",
)
Session.commit()
return SetCustomDomainMailboxesResult(success=True)
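Editor's note: a quick illustration of the sanitize/validate pipeline above. sanitize_domain lowercases and strips an http(s):// prefix, then is_valid_domain enforces the RFC 1035 rules (total length up to 255, labels of 1 to 63 characters that do not start or end with "-"). A small sketch, assuming the helpers as defined in this file:

from app.custom_domain_utils import is_valid_domain, sanitize_domain

for raw in ("HTTPS://Mail.Example.COM", "mail.example.com.", "-bad-.example.com"):
    domain = sanitize_domain(raw)
    print(domain, is_valid_domain(domain))
# The first two normalize to "mail.example.com" / "mail.example.com." and pass;
# the third fails because the label "-bad-" starts and ends with a hyphen.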


@ -1,37 +1,293 @@
from dataclasses import dataclass
from typing import List, Optional
from app import config
from app.constants import DMARC_RECORD
from app.db import Session
-from app.dns_utils import get_cname_record
from app.dns_utils import (
DNSClient,
get_network_dns_client,
)
from app.models import CustomDomain
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string
@dataclass
class DomainValidationResult:
success: bool
errors: [str]
@dataclass
class ExpectedValidationRecords:
recommended: str
allowed: list[str]
def is_mx_equivalent(
mx_domains: dict[int, list[str]],
expected_mx_domains: dict[int, ExpectedValidationRecords],
) -> bool:
"""
Compare mx_domains with ref_mx_domains to see if they are equivalent.
mx_domains and ref_mx_domains are list of (priority, domain)
The priority order is taken into account but not the priority number.
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
"""
expected_prios = []
for prio in expected_mx_domains:
expected_prios.append(prio)
if len(expected_prios) != len(mx_domains):
return False
for prio_position, prio_value in enumerate(sorted(mx_domains.keys())):
for domain in mx_domains[prio_value]:
if domain not in expected_mx_domains[expected_prios[prio_position]].allowed:
return False
return True
class CustomDomainValidation:
-def __init__(self, dkim_domain: str):
def __init__(
self,
dkim_domain: str,
dns_client: DNSClient = get_network_dns_client(),
partner_domains: Optional[dict[int, str]] = None,
partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
):
self.dkim_domain = dkim_domain
-self._dkim_records = {
-(f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
-for key in ("dkim", "dkim02", "dkim03")
self._dns_client = dns_client
self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
self._partner_domain_validation_prefixes = (
partner_domains_validation_prefixes
or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
)
def get_ownership_verification_record(
self, domain: CustomDomain
) -> ExpectedValidationRecords:
prefixes = ["sl"]
if (
domain.partner_id is not None
and domain.partner_id in self._partner_domain_validation_prefixes
):
prefixes.insert(
0, self._partner_domain_validation_prefixes[domain.partner_id]
)
if not domain.ownership_txt_token:
domain.ownership_txt_token = random_string(30)
Session.commit()
valid = [
f"{prefix}-verification={domain.ownership_txt_token}" for prefix in prefixes
]
return ExpectedValidationRecords(recommended=valid[0], allowed=valid)
def get_expected_mx_records(
self, domain: CustomDomain
) -> dict[int, ExpectedValidationRecords]:
records = {}
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
domain = self._partner_domains[domain.partner_id]
records[10] = [f"mx1.{domain}."]
records[20] = [f"mx2.{domain}."]
# Default ones
for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
if priority not in records:
records[priority] = []
records[priority].append(domain)
return {
priority: ExpectedValidationRecords(
recommended=records[priority][0], allowed=records[priority]
)
for priority in records
}
-def get_dkim_records(self) -> {str: str}:
-"""
-Get a list of dkim records to set up. It will be
def get_expected_spf_domain(
self, domain: CustomDomain
) -> ExpectedValidationRecords:
records = []
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
records.append(self._partner_domains[domain.partner_id])
else:
records.append(config.EMAIL_DOMAIN)
return ExpectedValidationRecords(recommended=records[0], allowed=records)
def get_expected_spf_record(self, domain: CustomDomain) -> str:
spf_domain = self.get_expected_spf_domain(domain)
return f"v=spf1 include:{spf_domain.recommended} ~all"
def get_dkim_records(
self, domain: CustomDomain
) -> {str: ExpectedValidationRecords}:
""" """
return self._dkim_records Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
it will return the default ones or the partner ones.
"""
# By default use the default domain
dkim_domains = [self.dkim_domain]
if domain.partner_id is not None:
# Domain is from a partner. Retrieve the partner config and use that domain as preferred if it exists
partner_domain = self._partner_domains.get(domain.partner_id, None)
if partner_domain is not None:
dkim_domains.insert(0, partner_domain)
output = {}
for key in ("dkim", "dkim02", "dkim03"):
records = [
f"{key}._domainkey.{dkim_domain}" for dkim_domain in dkim_domains
]
output[f"{key}._domainkey"] = ExpectedValidationRecords(
recommended=records[0], allowed=records
)
return output
def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
"""
Check if dkim records are properly set for this custom domain.
Returns empty list if all records are ok. Other-wise return the records that aren't properly configured
"""
correct_records = {}
invalid_records = {}
-for prefix, expected_record in self.get_dkim_records():
expected_records = self.get_dkim_records(custom_domain)
for prefix, expected_record in expected_records.items():
custom_record = f"{prefix}.{custom_domain.domain}"
-dkim_record = get_cname_record(custom_record)
dkim_record = self._dns_client.get_cname_record(custom_record)
-if dkim_record != expected_record:
if dkim_record in expected_record.allowed:
correct_records[prefix] = custom_record
else:
invalid_records[custom_record] = dkim_record or "empty"
-# HACK: If dkim is enabled, don't disable it to give users time to update their CNAMES
# HACK
# As initially we only had one dkim record, we want to allow users that had only the original dkim record and
# the domain validated to continue seeing it as validated (although showing them the missing records).
# However, if not even the original dkim record is right, even if the domain was dkim_verified in the past,
# we will remove the dkim_verified flag.
# This is done in order to give users with the old dkim config (only one) to update their CNAMEs
if custom_domain.dkim_verified:
-return invalid_records
# Check if at least the original dkim is there
if correct_records.get("dkim._domainkey") is not None:
# Original dkim record is there. Return the missing records (if any) and don't clear the flag
return invalid_records
# Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
# rest of the code path, returning the invalid records and clearing the flag
custom_domain.dkim_verified = len(invalid_records) == 0
if custom_domain.dkim_verified:
emit_user_audit_log(
user=custom_domain.user,
action=UserAuditLogAction.VerifyCustomDomain,
message=f"Verified DKIM records for custom domain {custom_domain.id} ({custom_domain.domain})",
)
Session.commit()
return invalid_records
def validate_domain_ownership(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
"""
Check if the custom_domain has added the ownership verification records
"""
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
expected_verification_records = self.get_ownership_verification_record(
custom_domain
)
found = False
for verification_record in expected_verification_records.allowed:
if verification_record in txt_records:
found = True
break
if found:
custom_domain.ownership_verified = True
emit_user_audit_log(
user=custom_domain.user,
action=UserAuditLogAction.VerifyCustomDomain,
message=f"Verified ownership for custom domain {custom_domain.id} ({custom_domain.domain})",
)
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
return DomainValidationResult(success=False, errors=txt_records)
def validate_mx_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
expected_mx_records = self.get_expected_mx_records(custom_domain)
if not is_mx_equivalent(mx_domains, expected_mx_records):
errors = []
for prio in mx_domains:
for mx_domain in mx_domains[prio]:
errors.append(f"{prio} {mx_domain}")
return DomainValidationResult(success=False, errors=errors)
else:
custom_domain.verified = True
emit_user_audit_log(
user=custom_domain.user,
action=UserAuditLogAction.VerifyCustomDomain,
message=f"Verified MX records for custom domain {custom_domain.id} ({custom_domain.domain})",
)
Session.commit()
return DomainValidationResult(success=True, errors=[])
def validate_spf_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
expected_spf_domain = self.get_expected_spf_domain(custom_domain)
if len(set(expected_spf_domain.allowed).intersection(set(spf_domains))) > 0:
custom_domain.spf_verified = True
emit_user_audit_log(
user=custom_domain.user,
action=UserAuditLogAction.VerifyCustomDomain,
message=f"Verified SPF records for custom domain {custom_domain.id} ({custom_domain.domain})",
)
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
custom_domain.spf_verified = False
Session.commit()
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
return DomainValidationResult(
success=False,
errors=cleaned_records,
)
def validate_dmarc_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
if DMARC_RECORD in txt_records:
custom_domain.dmarc_verified = True
emit_user_audit_log(
user=custom_domain.user,
action=UserAuditLogAction.VerifyCustomDomain,
message=f"Verified DMARC records for custom domain {custom_domain.id} ({custom_domain.domain})",
)
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
custom_domain.dmarc_verified = False
Session.commit()
return DomainValidationResult(success=False, errors=txt_records)
def __clean_spf_records(
self, txt_records: List[str], custom_domain: CustomDomain
) -> List[str]:
final_records = []
verification_records = self.get_ownership_verification_record(custom_domain)
for record in txt_records:
if record not in verification_records.allowed:
final_records.append(record)
return final_records
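Editor's note: because the validator now receives its DNSClient (and the partner maps) through the constructor, it can be exercised without real DNS lookups. A test-oriented sketch of an in-memory stand-in; the interface (get_cname_record, get_mx_domains, get_spf_domain, get_txt_record) is inferred from the calls above, and this class is not part of the changeset:

from typing import Optional

class InMemoryDNSClient:
    def __init__(self):
        self.cname: dict[str, str] = {}
        self.mx: dict[str, dict[int, list[str]]] = {}
        self.spf: dict[str, list[str]] = {}
        self.txt: dict[str, list[str]] = {}

    def get_cname_record(self, hostname: str) -> Optional[str]:
        return self.cname.get(hostname)

    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
        return self.mx.get(hostname, {})

    def get_spf_domain(self, hostname: str) -> list[str]:
        return self.spf.get(hostname, [])

    def get_txt_record(self, hostname: str) -> list[str]:
        return self.txt.get(hostname, [])

# e.g. CustomDomainValidation(dkim_domain="sl.example", dns_client=InMemoryDNSClient())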


@ -1,3 +1,5 @@
import secrets
import arrow
from flask import (
render_template,
@ -37,7 +39,7 @@ from app.models import (
SenderFormatEnum,
UnsubscribeBehaviourEnum,
)
-from app.proton.utils import perform_proton_account_unlink
from app.proton.proton_unlink import perform_proton_account_unlink
from app.utils import (
random_string,
CSRFValidationForm,
@ -163,7 +165,7 @@ def send_reset_password_email(user):
"""
# the activation code is valid for 1h
reset_password_code = ResetPasswordCode.create(
-user_id=user.id, code=random_string(60)
user_id=user.id, code=secrets.token_urlsafe(32)
)
Session.commit()
@ -237,6 +239,8 @@ def unlink_proton_account():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
-perform_proton_account_unlink(current_user)
-flash("Your Proton account has been unlinked", "success")
if not perform_proton_account_unlink(current_user):
flash("Account cannot be unlinked", "warning")
else:
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))
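Editor's note: the reset-code change above swaps an alphanumeric random_string(60) for secrets.token_urlsafe(32), i.e. 32 bytes from the OS CSPRNG encoded as a URL-safe string:

import secrets

code = secrets.token_urlsafe(32)
print(len(code), code)  # typically 43 characters, safe to embed in a reset URL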


@ -1,5 +1,6 @@
from dataclasses import dataclass
from operator import or_
from typing import Optional
from flask import render_template, request, redirect, flash
from flask import url_for
@ -9,13 +10,11 @@ from sqlalchemy import and_, func, case
from wtforms import StringField, validators, ValidationError
# Need to import directly from config to allow modification from the tests
-from app import config, parallel_limiter
from app import config, parallel_limiter, contact_utils
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.contact_utils import ContactCreateError
from app.dashboard.base import dashboard_bp
from app.db import Session
-from app.email_utils import (
-generate_reply_email,
-parse_full_address,
-)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
@ -24,8 +23,8 @@ from app.errors import (
ErrContactAlreadyExists,
)
from app.log import LOG
-from app.models import Alias, Contact, EmailLog, User
from app.models import Alias, Contact, EmailLog
-from app.utils import sanitize_email, CSRFValidationForm
from app.utils import CSRFValidationForm
def email_validator():
@ -51,7 +50,7 @@ def email_validator():
return _check
-def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
def create_contact(alias: Alias, contact_address: str) -> Contact:
"""
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
Can throw exceptions:
@ -61,37 +60,23 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
"""
if not contact_address:
raise ErrAddressInvalid("Empty address")
-try:
-contact_name, contact_email = parse_full_address(contact_address)
-except ValueError:
output = contact_utils.create_contact(email=contact_address, alias=alias)
if output.error == ContactCreateError.InvalidEmail:
raise ErrAddressInvalid(contact_address)
-contact_email = sanitize_email(contact_email)
-if not is_valid_email(contact_email):
-raise ErrAddressInvalid(contact_email)
-contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
-if contact:
-raise ErrContactAlreadyExists(contact)
-if not user.can_create_contacts():
elif output.error == ContactCreateError.NotAllowed:
raise ErrContactErrorUpgradeNeeded()
elif output.error is not None:
raise ErrAddressInvalid("Invalid address")
elif not output.created:
raise ErrContactAlreadyExists(output.contact)
-contact = Contact.create(
-user_id=alias.user_id,
-alias_id=alias.id,
-website_email=contact_email,
-name=contact_name,
-reply_email=generate_reply_email(contact_email, alias),
-)
contact = output.contact
LOG.d(
"create reverse-alias for %s %s, reverse alias:%s",
contact_address,
alias,
contact.reply_email,
)
-Session.commit()
return contact
@ -207,7 +192,7 @@ def get_contact_infos(
def delete_contact(alias: Alias, contact_id: int):
-contact = Contact.get(contact_id)
contact: Optional[Contact] = Contact.get(contact_id)
if not contact:
flash("Unknown error. Refresh the page", "warning")
@ -215,6 +200,11 @@ def delete_contact(alias: Alias, contact_id: int):
flash("You cannot delete reverse-alias", "warning")
else:
delete_contact_email = contact.website_email
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.DeleteContact,
message=f"Delete contact {contact_id} ({contact.email})",
)
Contact.delete(contact_id)
Session.commit()
@ -237,7 +227,10 @@ def alias_contact_manager(alias_id):
page = 0
if request.args.get("page"):
-page = int(request.args.get("page"))
try:
page = int(request.args.get("page"))
except ValueError:
pass
query = request.args.get("query") or ""
@ -261,7 +254,7 @@ def alias_contact_manager(alias_id):
if new_contact_form.validate():
contact_address = new_contact_form.email.data.strip()
try:
-contact = create_contact(current_user, alias, contact_address)
contact = create_contact(alias, contact_address)
except (
ErrContactErrorUpgradeNeeded,
ErrAddressInvalid,
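Editor's note: the page-number handling above (and the same pattern in the index view further down) swallows non-numeric ?page= values instead of raising. Equivalent logic as a small helper; the name is hypothetical and not something this changeset adds:

from typing import Optional

def parse_page(raw: Optional[str]) -> int:
    # Mirrors the try/except around int(request.args.get("page")).
    if not raw:
        return 0
    try:
        return int(raw)
    except ValueError:
        return 0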


@ -7,6 +7,7 @@ from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user
from app import config
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_utils import transfer_alias
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
@ -57,6 +58,12 @@ def alias_transfer_send_route(alias_id):
transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
alias.transfer_token = hmac_alias_transfer_token(transfer_token)
alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)
emit_alias_audit_log(
alias,
AliasAuditLogAction.InitiateTransferAlias,
"Initiated alias transfer",
)
Session.commit()
alias_transfer_url = (
config.URL
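Editor's note: for context on the lines above, the transfer link embeds a fresh secrets token while only an HMAC of it is stored on the alias. A standalone sketch of that store-only-the-digest idea; hmac_alias_transfer_token itself lives elsewhere in the app and is not shown in this compare:

import hashlib
import hmac
import secrets

def make_transfer_token(alias_id: int, secret: bytes) -> tuple[str, str]:
    token = f"{alias_id}.{secrets.token_urlsafe(32)}"
    digest = hmac.new(secret, token.encode("utf-8"), hashlib.sha256).hexdigest()
    return token, digest  # hand `token` to the user, persist only `digest`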


@ -3,7 +3,7 @@ from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required, current_user
from app import s3
-from app.config import JOB_BATCH_IMPORT
from app.constants import JobType
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
@ -64,7 +64,7 @@ def batch_import_route():
# Schedule batch import job
Job.create(
-name=JOB_BATCH_IMPORT,
name=JobType.BATCH_IMPORT.value,
payload={"batch_import_id": bi.id},
run_at=arrow.now(),
)


@ -1,8 +1,11 @@
from typing import Optional
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Contact
@ -20,7 +23,7 @@ class PGPContactForm(FlaskForm):
@dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
@login_required
def contact_detail_route(contact_id):
-contact = Contact.get(contact_id)
contact: Optional[Contact] = Contact.get(contact_id)
if not contact or contact.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
@ -50,6 +53,11 @@ def contact_detail_route(contact_id):
except PGPException:
flash("Cannot add the public key, please verify it", "error")
else:
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.UpdateContact,
message=f"Added PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
)
Session.commit()
flash(
f"PGP public key for {contact.email} is saved successfully",
@ -62,6 +70,11 @@ def contact_detail_route(contact_id):
)
elif pgp_form.action.data == "remove":
# Free user can decide to remove contact PGP key
emit_alias_audit_log(
alias=alias,
action=AliasAuditLogAction.UpdateContact,
message=f"Removed PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
)
contact.pgp_public_key = None
contact.pgp_finger_print = None
Session.commit()


@ -1,17 +1,15 @@
import arrow
-from flask import render_template, flash, request, redirect, url_for
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app import parallel_limiter
from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
from app.coupon_utils import redeem_coupon, CouponUserCannotRedeemError
from app.dashboard.base import dashboard_bp
-from app.db import Session
from app.log import LOG
from app.models import (
-ManualSubscription,
-Coupon,
Subscription,
AppleSubscription,
CoinbaseSubscription,
@ -58,56 +56,23 @@ def coupon_route():
if coupon_form.validate_on_submit():
code = coupon_form.code.data
-coupon: Coupon = Coupon.get_by(code=code)
-if coupon and not coupon.used:
-if coupon.expires_date and coupon.expires_date < arrow.now():
-flash(
-f"The coupon was expired on {coupon.expires_date.humanize()}",
-"error",
-)
-return redirect(request.url)
-updated = (
-Session.query(Coupon)
-.filter_by(code=code, used=False)
-.update({"used_by_user_id": current_user.id, "used": True})
-)
-if updated != 1:
-flash("Coupon is not valid", "error")
-return redirect(request.url)
-manual_sub: ManualSubscription = ManualSubscription.get_by(
-user_id=current_user.id
-)
-if manual_sub:
-# renew existing subscription
-if manual_sub.end_at > arrow.now():
-manual_sub.end_at = manual_sub.end_at.shift(years=coupon.nb_year)
-else:
-manual_sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
-Session.commit()
-flash(
-f"Your current subscription is extended to {manual_sub.end_at.humanize()}",
-"success",
-)
-else:
-ManualSubscription.create(
-user_id=current_user.id,
-end_at=arrow.now().shift(years=coupon.nb_year, days=1),
-comment="using coupon code",
-is_giveaway=coupon.is_giveaway,
-commit=True,
-)
try:
coupon = redeem_coupon(code, current_user)
if coupon:
flash(
"Your account has been upgraded to Premium, thanks for your support!",
"success",
)
-return redirect(url_for("dashboard.index"))
-else:
-flash(f"Code *{code}* expired or invalid", "warning")
else:
flash(
"This coupon cannot be redeemed. It's invalid or has expired",
"warning",
)
except CouponUserCannotRedeemError:
flash(
"You have an active subscription. Please remove it before redeeming a coupon",
"warning",
)
return render_template(
"dashboard/coupon.html",


@ -5,11 +5,9 @@ from wtforms import StringField, validators
from app import parallel_limiter
from app.config import EMAIL_SERVERS_WITH_PRIORITY
from app.custom_domain_utils import create_custom_domain
from app.dashboard.base import dashboard_bp
-from app.db import Session
from app.models import CustomDomain
-from app.email_utils import get_email_domain_part
-from app.log import LOG
-from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
class NewCustomDomainForm(FlaskForm):
@ -23,13 +21,12 @@ class NewCustomDomainForm(FlaskForm):
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
def custom_domain():
custom_domains = CustomDomain.filter_by(
-user_id=current_user.id, is_sl_subdomain=False
user_id=current_user.id,
is_sl_subdomain=False,
pending_deletion=False,
).all()
-mailboxes = current_user.mailboxes()
new_custom_domain_form = NewCustomDomainForm()
-errors = {}
if request.method == "POST":
if request.form.get("form-name") == "create":
if not current_user.is_premium():
@ -37,87 +34,25 @@ def custom_domain():
return redirect(url_for("dashboard.custom_domain"))
if new_custom_domain_form.validate():
-new_domain = new_custom_domain_form.domain.data.lower().strip()
-if new_domain.startswith("http://"):
-new_domain = new_domain[len("http://") :]
-if new_domain.startswith("https://"):
-new_domain = new_domain[len("https://") :]
-if SLDomain.get_by(domain=new_domain):
-flash("A custom domain cannot be a built-in domain.", "error")
-elif CustomDomain.get_by(domain=new_domain):
-flash(f"{new_domain} already used", "error")
-elif get_email_domain_part(current_user.email) == new_domain:
-flash(
-"You cannot add a domain that you are currently using for your personal email. "
-"Please change your personal email to your real email",
-"error",
-)
-elif Mailbox.filter(
-Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
-).first():
-flash(
-f"{new_domain} already used in a SimpleLogin mailbox", "error"
-)
-else:
-new_custom_domain = CustomDomain.create(
-domain=new_domain, user_id=current_user.id
-)
-# new domain has ownership verified if its parent has the ownership verified
-for root_cd in current_user.custom_domains:
-if (
-new_domain.endswith("." + root_cd.domain)
-and root_cd.ownership_verified
-):
-LOG.i(
-"%s ownership verified thanks to %s",
-new_custom_domain,
-root_cd,
-)
-new_custom_domain.ownership_verified = True
-Session.commit()
-mailbox_ids = request.form.getlist("mailbox_ids")
-if mailbox_ids:
-# check if mailbox is not tempered with
-mailboxes = []
-for mailbox_id in mailbox_ids:
-mailbox = Mailbox.get(mailbox_id)
-if (
-not mailbox
-or mailbox.user_id != current_user.id
-or not mailbox.verified
-):
-flash("Something went wrong, please retry", "warning")
-return redirect(url_for("dashboard.custom_domain"))
-mailboxes.append(mailbox)
-for mailbox in mailboxes:
-DomainMailbox.create(
-domain_id=new_custom_domain.id, mailbox_id=mailbox.id
-)
-Session.commit()
-flash(
-f"New domain {new_custom_domain.domain} is created", "success"
-)
res = create_custom_domain(
user=current_user, domain=new_custom_domain_form.domain.data
)
if res.success:
flash(f"New domain {res.instance.domain} is created", "success")
return redirect(
url_for(
"dashboard.domain_detail_dns",
-custom_domain_id=new_custom_domain.id,
custom_domain_id=res.instance.id,
)
)
else:
flash(res.message, res.message_category)
if res.redirect:
return redirect(url_for(res.redirect))
return render_template(
"dashboard/custom_domain.html",
custom_domains=custom_domains,
new_custom_domain_form=new_custom_domain_form,
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
-errors=errors,
-mailboxes=mailboxes,
)
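Editor's note: outside the request handler, the same CreateCustomDomainResult can be consumed directly. A short sketch, assuming the dataclass defined in the custom_domain_utils file added earlier in this compare:

from app.custom_domain_utils import create_custom_domain

def add_domain(user, raw_domain: str) -> str:
    res = create_custom_domain(user=user, domain=raw_domain)
    if res.success:
        return f"created custom domain id={res.instance.id}"
    # res.message / res.message_category carry the user-facing error, e.g. "already used"
    return f"{res.message_category}: {res.message}"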


@ -3,11 +3,12 @@ from flask import flash, redirect, url_for, request, render_template
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
-from app.config import JOB_DELETE_ACCOUNT
from app.constants import JobType
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.log import LOG
from app.models import Subscription, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
class DeleteDirForm(FlaskForm):
@ -33,8 +34,13 @@ def delete_account():
# Schedule delete account job
LOG.w("schedule delete account job for %s", current_user)
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UserMarkedForDeletion,
message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
)
Job.create(
-name=JOB_DELETE_ACCOUNT,
name=JobType.DELETE_ACCOUNT.value,
payload={"user_id": current_user.id},
run_at=arrow.now(),
commit=True,


@ -1,3 +1,5 @@
from typing import Optional
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
@ -20,6 +22,7 @@ from app.dashboard.base import dashboard_bp
from app.db import Session
from app.errors import DirectoryInTrashError
from app.models import Directory, Mailbox, DirectoryMailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
class NewDirForm(FlaskForm): class NewDirForm(FlaskForm):
@ -69,7 +72,9 @@ def directory():
if not delete_dir_form.validate(): if not delete_dir_form.validate():
flash("Invalid request", "warning") flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory")) return redirect(url_for("dashboard.directory"))
dir_obj = Directory.get(delete_dir_form.directory_id.data) dir_obj: Optional[Directory] = Directory.get(
delete_dir_form.directory_id.data
)
if not dir_obj: if not dir_obj:
flash("Unknown error. Refresh the page", "warning") flash("Unknown error. Refresh the page", "warning")
@ -79,6 +84,11 @@ def directory():
return redirect(url_for("dashboard.directory")) return redirect(url_for("dashboard.directory"))
name = dir_obj.name name = dir_obj.name
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.DeleteDirectory,
message=f"Delete directory {dir_obj.id} ({dir_obj.name})",
)
Directory.delete(dir_obj.id) Directory.delete(dir_obj.id)
Session.commit() Session.commit()
flash(f"Directory {name} has been deleted", "success") flash(f"Directory {name} has been deleted", "success")
@ -90,7 +100,7 @@ def directory():
flash("Invalid request", "warning") flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory")) return redirect(url_for("dashboard.directory"))
dir_id = toggle_dir_form.directory_id.data dir_id = toggle_dir_form.directory_id.data
dir_obj = Directory.get(dir_id) dir_obj: Optional[Directory] = Directory.get(dir_id)
if not dir_obj or dir_obj.user_id != current_user.id: if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning") flash("Unknown error. Refresh the page", "warning")
@ -103,6 +113,11 @@ def directory():
dir_obj.disabled = True dir_obj.disabled = True
flash(f"On-the-fly is disabled for {dir_obj.name}", "warning") flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateDirectory,
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) set disabled = {dir_obj.disabled}",
)
Session.commit() Session.commit()
return redirect(url_for("dashboard.directory")) return redirect(url_for("dashboard.directory"))
@ -112,7 +127,7 @@ def directory():
flash("Invalid request", "warning") flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory")) return redirect(url_for("dashboard.directory"))
dir_id = update_dir_form.directory_id.data dir_id = update_dir_form.directory_id.data
dir_obj = Directory.get(dir_id) dir_obj: Optional[Directory] = Directory.get(dir_id)
if not dir_obj or dir_obj.user_id != current_user.id: if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning") flash("Unknown error. Refresh the page", "warning")
@ -143,6 +158,12 @@ def directory():
for mailbox in mailboxes: for mailbox in mailboxes:
DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id) DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)
mailboxes_as_str = ",".join(map(str, mailbox_ids))
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateDirectory,
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) mailboxes ({mailboxes_as_str})",
)
Session.commit() Session.commit()
flash(f"Directory {dir_obj.name} has been updated", "success") flash(f"Directory {dir_obj.name} has been updated", "success")
@ -181,6 +202,11 @@ def directory():
new_dir = Directory.create( new_dir = Directory.create(
name=new_dir_name, user_id=current_user.id name=new_dir_name, user_id=current_user.id
) )
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.CreateDirectory,
message=f"New directory {new_dir.name} ({new_dir.name})",
)
except DirectoryInTrashError: except DirectoryInTrashError:
flash( flash(
f"{new_dir_name} has been used before and cannot be reused", f"{new_dir_name} has been used before and cannot be reused",


@ -1,33 +1,26 @@
import re
-import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators, IntegerField
-from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
from app.constants import DMARC_RECORD
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
from app.custom_domain_validation import CustomDomainValidation
from app.dashboard.base import dashboard_bp
from app.db import Session
-from app.dns_utils import (
-get_mx_domains,
-get_spf_domain,
-get_txt_record,
-is_mx_equivalent,
-)
from app.log import LOG
from app.models import (
CustomDomain,
Alias,
DomainDeletedAlias,
Mailbox,
-DomainMailbox,
AutoCreateRule,
AutoCreateRuleMailbox,
-Job,
)
from app.regex_utils import regex_match
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string, CSRFValidationForm
@ -44,13 +37,9 @@ def domain_detail_dns(custom_domain_id):
custom_domain.ownership_txt_token = random_string(30) custom_domain.ownership_txt_token = random_string(30)
Session.commit() Session.commit()
spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"
domain_validator = CustomDomainValidation(EMAIL_DOMAIN) domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
csrf_form = CSRFValidationForm() csrf_form = CSRFValidationForm()
dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = [] mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
@ -59,15 +48,14 @@ def domain_detail_dns(custom_domain_id):
flash("Invalid request", "warning") flash("Invalid request", "warning")
return redirect(request.url) return redirect(request.url)
if request.form.get("form-name") == "check-ownership": if request.form.get("form-name") == "check-ownership":
txt_records = get_txt_record(custom_domain.domain) ownership_validation_result = domain_validator.validate_domain_ownership(
custom_domain
if custom_domain.get_ownership_dns_txt_value() in txt_records: )
if ownership_validation_result.success:
flash( flash(
"Domain ownership is verified. Please proceed to the other records setup", "Domain ownership is verified. Please proceed to the other records setup",
"success", "success",
) )
custom_domain.ownership_verified = True
Session.commit()
return redirect( return redirect(
url_for( url_for(
"dashboard.domain_detail_dns", "dashboard.domain_detail_dns",
@ -78,36 +66,28 @@ def domain_detail_dns(custom_domain_id):
else: else:
flash("We can't find the needed TXT record", "error") flash("We can't find the needed TXT record", "error")
ownership_ok = False ownership_ok = False
ownership_errors = txt_records ownership_errors = ownership_validation_result.errors
elif request.form.get("form-name") == "check-mx": elif request.form.get("form-name") == "check-mx":
mx_domains = get_mx_domains(custom_domain.domain) mx_validation_result = domain_validator.validate_mx_records(custom_domain)
if mx_validation_result.success:
if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
flash("The MX record is not correctly set", "warning")
mx_ok = False
# build mx_errors to show to user
mx_errors = [
f"{priority} {domain}" for (priority, domain) in mx_domains
]
else:
flash( flash(
"Your domain can start receiving emails. You can now use it to create alias", "Your domain can start receiving emails. You can now use it to create alias",
"success", "success",
) )
custom_domain.verified = True
Session.commit()
return redirect( return redirect(
url_for( url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id "dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
) )
) )
else:
flash("The MX record is not correctly set", "warning")
mx_ok = False
mx_errors = mx_validation_result.errors
elif request.form.get("form-name") == "check-spf": elif request.form.get("form-name") == "check-spf":
spf_domains = get_spf_domain(custom_domain.domain) spf_validation_result = domain_validator.validate_spf_records(custom_domain)
if EMAIL_DOMAIN in spf_domains: if spf_validation_result.success:
custom_domain.spf_verified = True
Session.commit()
flash("SPF is setup correctly", "success") flash("SPF is setup correctly", "success")
return redirect( return redirect(
url_for( url_for(
@ -115,14 +95,12 @@ def domain_detail_dns(custom_domain_id):
) )
) )
else: else:
custom_domain.spf_verified = False
Session.commit()
flash( flash(
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.", f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
"warning", "warning",
) )
spf_ok = False spf_ok = False
spf_errors = get_txt_record(custom_domain.domain) spf_errors = spf_validation_result.errors
elif request.form.get("form-name") == "check-dkim": elif request.form.get("form-name") == "check-dkim":
dkim_errors = domain_validator.validate_dkim_records(custom_domain) dkim_errors = domain_validator.validate_dkim_records(custom_domain)
@ -138,10 +116,10 @@ def domain_detail_dns(custom_domain_id):
flash("DKIM: the CNAME record is not correctly set", "warning") flash("DKIM: the CNAME record is not correctly set", "warning")
elif request.form.get("form-name") == "check-dmarc": elif request.form.get("form-name") == "check-dmarc":
txt_records = get_txt_record("_dmarc." + custom_domain.domain) dmarc_validation_result = domain_validator.validate_dmarc_records(
if dmarc_record in txt_records: custom_domain
custom_domain.dmarc_verified = True )
Session.commit() if dmarc_validation_result.success:
flash("DMARC is setup correctly", "success") flash("DMARC is setup correctly", "success")
return redirect( return redirect(
url_for( url_for(
@ -149,19 +127,23 @@ def domain_detail_dns(custom_domain_id):
) )
) )
else: else:
custom_domain.dmarc_verified = False
Session.commit()
flash( flash(
"DMARC: The TXT record is not correctly set", "DMARC: The TXT record is not correctly set",
"warning", "warning",
) )
dmarc_ok = False dmarc_ok = False
dmarc_errors = txt_records dmarc_errors = dmarc_validation_result.errors
return render_template( return render_template(
"dashboard/domain_detail/dns.html", "dashboard/domain_detail/dns.html",
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY, EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
dkim_records=domain_validator.get_dkim_records(), ownership_records=domain_validator.get_ownership_verification_record(
custom_domain
),
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
dkim_records=domain_validator.get_dkim_records(custom_domain),
spf_record=domain_validator.get_expected_spf_record(custom_domain),
dmarc_record=DMARC_RECORD,
**locals(), **locals(),
) )
@ -183,6 +165,11 @@ def domain_detail(custom_domain_id):
return redirect(request.url) return redirect(request.url)
if request.form.get("form-name") == "switch-catch-all": if request.form.get("form-name") == "switch-catch-all":
custom_domain.catch_all = not custom_domain.catch_all custom_domain.catch_all = not custom_domain.catch_all
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) catch all to {custom_domain.catch_all}",
)
Session.commit() Session.commit()
if custom_domain.catch_all: if custom_domain.catch_all:
@ -201,6 +188,11 @@ def domain_detail(custom_domain_id):
elif request.form.get("form-name") == "set-name": elif request.form.get("form-name") == "set-name":
if request.form.get("action") == "save": if request.form.get("action") == "save":
custom_domain.name = request.form.get("alias-name").replace("\n", "") custom_domain.name = request.form.get("alias-name").replace("\n", "")
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) name",
)
Session.commit() Session.commit()
flash( flash(
f"Default alias name for Domain {custom_domain.domain} has been set", f"Default alias name for Domain {custom_domain.domain} has been set",
@ -208,6 +200,11 @@ def domain_detail(custom_domain_id):
) )
else: else:
custom_domain.name = None custom_domain.name = None
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Cleared custom domain {custom_domain.id} ({custom_domain.domain}) name",
)
Session.commit() Session.commit()
flash( flash(
f"Default alias name for Domain {custom_domain.domain} has been removed", f"Default alias name for Domain {custom_domain.domain} has been removed",
@ -221,6 +218,11 @@ def domain_detail(custom_domain_id):
custom_domain.random_prefix_generation = ( custom_domain.random_prefix_generation = (
not custom_domain.random_prefix_generation not custom_domain.random_prefix_generation
) )
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UpdateCustomDomain,
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) random prefix generation to {custom_domain.random_prefix_generation}",
)
Session.commit() Session.commit()
if custom_domain.random_prefix_generation: if custom_domain.random_prefix_generation:
@ -238,40 +240,16 @@ def domain_detail(custom_domain_id):
) )
elif request.form.get("form-name") == "update": elif request.form.get("form-name") == "update":
mailbox_ids = request.form.getlist("mailbox_ids") mailbox_ids = request.form.getlist("mailbox_ids")
# check if mailbox is not tempered with result = set_custom_domain_mailboxes(
mailboxes = [] user_id=current_user.id,
for mailbox_id in mailbox_ids: custom_domain=custom_domain,
mailbox = Mailbox.get(mailbox_id) mailbox_ids=mailbox_ids,
if ( )
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
mailboxes.append(mailbox)
if not mailboxes: if result.success:
flash("You must select at least 1 mailbox", "warning") flash(f"{custom_domain.domain} mailboxes has been updated", "success")
return redirect( else:
url_for( flash(result.reason.value, "warning")
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
Session.commit()
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
return redirect( return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id) url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
@ -279,16 +257,8 @@ def domain_detail(custom_domain_id):
elif request.form.get("form-name") == "delete": elif request.form.get("form-name") == "delete":
name = custom_domain.domain name = custom_domain.domain
LOG.d("Schedule deleting %s", custom_domain)
# Schedule delete domain job delete_custom_domain(custom_domain)
LOG.w("schedule delete domain job for %s", custom_domain)
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": custom_domain.id},
run_at=arrow.now(),
commit=True,
)
flash( flash(
f"{name} scheduled for deletion." f"{name} scheduled for deletion."
View File
@@ -11,7 +11,7 @@ from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser, SocialAuth
- from app.proton.utils import get_proton_partner
+ from app.proton.proton_partner import get_proton_partner
from app.utils import sanitize_next_url
_SUDO_GAP = 120
View File
@@ -71,7 +71,10 @@ def index():
page = 0
if request.args.get("page"):
- page = int(request.args.get("page"))
+ try:
+ page = int(request.args.get("page"))
+ except ValueError:
+ pass
highlight_alias_id = None
if request.args.get("highlight_alias_id"):

@@ -145,11 +148,13 @@ def index():
LOG.i(f"User {current_user} requested deletion of alias {alias}")
email = alias.email
alias_utils.delete_alias(
- alias, current_user, AliasDeleteReason.ManualAction
+ alias, current_user, AliasDeleteReason.ManualAction, commit=True
)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
- alias_utils.change_alias_status(alias, enabled=False)
+ alias_utils.change_alias_status(
+ alias, enabled=False, message="Set enabled=False from dashboard"
+ )
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")
View File
@@ -3,11 +3,9 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
- from app.config import ADMIN_EMAIL
+ from app import parallel_limiter
+ from app.coupon_utils import redeem_lifetime_coupon
from app.dashboard.base import dashboard_bp
- from app.db import Session
- from app.email_utils import send_email
- from app.models import LifetimeCoupon
class CouponForm(FlaskForm):

@@ -16,6 +14,7 @@ class CouponForm(FlaskForm):
@dashboard_bp.route("/lifetime_licence", methods=["GET", "POST"])
@login_required
+ @parallel_limiter.lock()
def lifetime_licence():
if current_user.lifetime:
flash("You already have a lifetime licence", "warning")

@@ -32,28 +31,12 @@ def lifetime_licence():
if coupon_form.validate_on_submit():
code = coupon_form.code.data
- coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=code)
- if coupon and coupon.nb_used > 0:
- coupon.nb_used -= 1
- current_user.lifetime = True
- current_user.lifetime_coupon_id = coupon.id
- if coupon.paid:
- current_user.paid_lifetime = True
- Session.commit()
- # notify admin
- send_email(
- ADMIN_EMAIL,
- subject=f"User {current_user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
- plaintext="",
- html="",
- )
+ coupon = redeem_lifetime_coupon(code, current_user)
+ if coupon:
flash("You are upgraded to lifetime premium!", "success")
return redirect(url_for("dashboard.index"))
else:
- flash(f"Code *{code}* expired or invalid", "warning")
+ flash("Coupon code expired or invalid", "warning")
return render_template("dashboard/lifetime_licence.html", coupon_form=coupon_form)
View File
@@ -1,8 +1,8 @@
import base64
import binascii
import json
- import arrow
+ from typing import Optional
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm

@@ -10,19 +10,13 @@ from itsdangerous import TimestampSigner
from wtforms import validators, IntegerField
from wtforms.fields.html5 import EmailField
- from app import parallel_limiter
- from app.config import MAILBOX_SECRET, URL, JOB_DELETE_MAILBOX
+ from app import parallel_limiter, mailbox_utils, user_settings
+ from app.config import MAILBOX_SECRET
from app.dashboard.base import dashboard_bp
from app.db import Session
- from app.email_utils import (
- email_can_be_used_as_mailbox,
- mailbox_already_used,
- render,
- send_email,
- )
- from app.email_validation import is_valid_email
from app.log import LOG
- from app.models import Mailbox, Job
+ from app.models import Mailbox
+ from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import CSRFValidationForm

@@ -58,120 +52,61 @@ def mailbox_route():
if not delete_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
- mailbox = Mailbox.get(delete_mailbox_form.mailbox_id.data)
- if not mailbox or mailbox.user_id != current_user.id:
- flash("Invalid mailbox. Refresh the page", "warning")
+ try:
+ mailbox = mailbox_utils.delete_mailbox(
+ current_user,
+ delete_mailbox_form.mailbox_id.data,
+ delete_mailbox_form.transfer_mailbox_id.data,
+ )
+ except mailbox_utils.MailboxError as e:
+ flash(e.msg, "warning")
return redirect(url_for("dashboard.mailbox_route"))
- if mailbox.id == current_user.default_mailbox_id:
- flash("You cannot delete default mailbox", "error")
- return redirect(url_for("dashboard.mailbox_route"))
- transfer_mailbox_id = delete_mailbox_form.transfer_mailbox_id.data
- if transfer_mailbox_id and transfer_mailbox_id > 0:
- transfer_mailbox = Mailbox.get(transfer_mailbox_id)
- if not transfer_mailbox or transfer_mailbox.user_id != current_user.id:
- flash(
- "You must transfer the aliases to a mailbox you own.", "error"
- )
- return redirect(url_for("dashboard.mailbox_route"))
- if transfer_mailbox.id == mailbox.id:
- flash(
- "You can not transfer the aliases to the mailbox you want to delete.",
- "error",
- )
- return redirect(url_for("dashboard.mailbox_route"))
- if not transfer_mailbox.verified:
- flash("Your new mailbox is not verified", "error")
- return redirect(url_for("dashboard.mailbox_route"))
- # Schedule delete account job
- LOG.w(
- f"schedule delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
- )
- Job.create(
- name=JOB_DELETE_MAILBOX,
- payload={
- "mailbox_id": mailbox.id,
- "transfer_mailbox_id": transfer_mailbox_id
- if transfer_mailbox_id > 0
- else None,
- },
- run_at=arrow.now(),
- commit=True,
- )
flash(
f"Mailbox {mailbox.email} scheduled for deletion."
f"You will receive a confirmation email when the deletion is finished",
"success",
)
return redirect(url_for("dashboard.mailbox_route"))
if request.form.get("form-name") == "set-default":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
- mailbox_id = request.form.get("mailbox_id")
- mailbox = Mailbox.get(mailbox_id)
- if not mailbox or mailbox.user_id != current_user.id:
- flash("Unknown error. Refresh the page", "warning")
+ try:
+ mailbox_id = request.form.get("mailbox_id")
+ mailbox = user_settings.set_default_mailbox(current_user, mailbox_id)
+ except user_settings.CannotSetMailbox as e:
+ flash(e.msg, "warning")
return redirect(url_for("dashboard.mailbox_route"))
- if mailbox.id == current_user.default_mailbox_id:
- flash("This mailbox is already default one", "error")
- return redirect(url_for("dashboard.mailbox_route"))
- if not mailbox.verified:
- flash("Cannot set unverified mailbox as default", "error")
- return redirect(url_for("dashboard.mailbox_route"))
- current_user.default_mailbox_id = mailbox.id
- Session.commit()
flash(f"Mailbox {mailbox.email} is set as Default Mailbox", "success")
return redirect(url_for("dashboard.mailbox_route"))
elif request.form.get("form-name") == "create":
- if not current_user.is_premium():
- flash("Only premium plan can add additional mailbox", "warning")
+ if not new_mailbox_form.validate():
+ flash("Invalid request", "warning")
+ return redirect(request.url)
+ mailbox_email = new_mailbox_form.email.data.lower().strip().replace(" ", "")
+ try:
+ mailbox = mailbox_utils.create_mailbox(
+ current_user, mailbox_email
+ ).mailbox
+ except mailbox_utils.MailboxError as e:
+ flash(e.msg, "warning")
return redirect(url_for("dashboard.mailbox_route"))
- if new_mailbox_form.validate():
- mailbox_email = (
- new_mailbox_form.email.data.lower().strip().replace(" ", "")
- )
- if not is_valid_email(mailbox_email):
- flash(f"{mailbox_email} invalid", "error")
- elif mailbox_already_used(mailbox_email, current_user):
- flash(f"{mailbox_email} already used", "error")
- elif not email_can_be_used_as_mailbox(mailbox_email):
- flash(f"You cannot use {mailbox_email}.", "error")
- else:
- new_mailbox = Mailbox.create(
- email=mailbox_email, user_id=current_user.id
- )
- Session.commit()
- send_verification_email(current_user, new_mailbox)
- flash(
- f"You are going to receive an email to confirm {mailbox_email}.",
- "success",
- )
- return redirect(
- url_for(
- "dashboard.mailbox_detail_route",
- mailbox_id=new_mailbox.id,
- )
- )
+ flash(
+ f"You are going to receive an email to confirm {mailbox.email}.",
+ "success",
+ )
+ return redirect(
+ url_for(
+ "dashboard.mailbox_detail_route",
+ mailbox_id=mailbox.id,
+ )
+ )
return render_template(
"dashboard/mailbox.html",

@@ -182,34 +117,31 @@ def mailbox_route():
)
- def send_verification_email(user, mailbox):
- s = TimestampSigner(MAILBOX_SECRET)
- encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
- b64_data = base64.urlsafe_b64encode(encoded_data)
- mailbox_id_signed = s.sign(b64_data).decode()
- verification_url = (
- URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
- )
- send_email(
- mailbox.email,
- f"Please confirm your mailbox {mailbox.email}",
- render(
- "transactional/verify-mailbox.txt.jinja2",
- user=user,
- link=verification_url,
- mailbox_email=mailbox.email,
- ),
- render(
- "transactional/verify-mailbox.html",
- user=user,
- link=verification_url,
- mailbox_email=mailbox.email,
- ),
- )
@dashboard_bp.route("/mailbox_verify")
+ @login_required
def mailbox_verify():
+ mailbox_id = request.args.get("mailbox_id")
+ if not mailbox_id:
+ LOG.i("Missing mailbox_id")
+ flash("You followed an invalid link", "error")
+ return redirect(url_for("dashboard.mailbox_route"))
+ code = request.args.get("code")
+ if not code:
+ # Old way
+ return verify_with_signed_secret(mailbox_id)
+ try:
+ mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
+ except mailbox_utils.MailboxError as e:
+ LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
+ flash(f"Cannot verify mailbox: {e.msg}", "error")
+ return redirect(url_for("dashboard.mailbox_route"))
+ LOG.d("Mailbox %s is verified", mailbox)
+ return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
+ def verify_with_signed_secret(request: str):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_verify_request = request.args.get("mailbox_id")
try:

@@ -227,7 +159,7 @@ def mailbox_verify():
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_id = mailbox_data[0]
- mailbox = Mailbox.get(mailbox_id)
+ mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))

@@ -237,6 +169,11 @@ def mailbox_verify():
return redirect(url_for("dashboard.mailbox_route"))
mailbox.verified = True
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.VerifyMailbox,
+ message=f"Verified mailbox {mailbox.id} ({mailbox.email})",
+ )
Session.commit()
LOG.d("Mailbox %s is verified", mailbox)
View File
@@ -1,30 +1,31 @@
- from smtplib import SMTPRecipientsRefused
from email_validator import validate_email, EmailNotValidError
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import TimestampSigner
from wtforms import validators
- from wtforms.fields.html5 import EmailField
+ from wtforms.fields.simple import StringField
+ from app import mailbox_utils
from app.config import ENFORCE_SPF, MAILBOX_SECRET
- from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
- from app.email_utils import email_can_be_used_as_mailbox
- from app.email_utils import mailbox_already_used, render, send_email
from app.extensions import limiter
- from app.log import LOG
- from app.models import Alias, AuthorizedAddress
+ from app.mailbox_utils import (
+ perform_mailbox_email_change,
+ MailboxEmailChangeError,
+ MailboxError,
+ )
+ from app.models import AuthorizedAddress
from app.models import Mailbox
from app.pgp_utils import PGPException, load_public_key_and_check
+ from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email, CSRFValidationForm
class ChangeEmailForm(FlaskForm):
- email = EmailField(
+ email = StringField(
"email", validators=[validators.DataRequired(), validators.Email()]
)

@@ -55,41 +56,30 @@ def mailbox_detail_route(mailbox_id):
request.form.get("form-name") == "update-email"
and change_email_form.validate_on_submit()
):
- new_email = sanitize_email(change_email_form.email.data)
- if new_email != mailbox.email and not pending_email:
- # check if this email is not already used
- if mailbox_already_used(new_email, current_user) or Alias.get_by(
- email=new_email
- ):
- flash(f"Email {new_email} already used", "error")
- elif not email_can_be_used_as_mailbox(new_email):
- flash("You cannot use this email address as your mailbox", "error")
- else:
- mailbox.new_email = new_email
- Session.commit()
- try:
- verify_mailbox_change(current_user, mailbox, new_email)
- except SMTPRecipientsRefused:
- flash(
- f"Incorrect mailbox, please recheck {mailbox.email}",
- "error",
- )
- else:
- flash(
- f"You are going to receive an email to confirm {new_email}.",
- "success",
- )
- return redirect(
- url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
- )
+ try:
+ response = mailbox_utils.request_mailbox_email_change(
+ current_user, mailbox, change_email_form.email.data
+ )
+ flash(
+ f"You are going to receive an email to confirm {mailbox.email}.",
+ "success",
+ )
+ except mailbox_utils.MailboxError as e:
+ flash(e.msg, "error")
+ return redirect(
+ url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
+ )
elif request.form.get("form-name") == "force-spf":
if not ENFORCE_SPF:
flash("SPF enforcement globally not enabled", "error")
return redirect(url_for("dashboard.index"))
- mailbox.force_spf = (
- True if request.form.get("spf-status") == "on" else False
+ force_spf_value = request.form.get("spf-status") == "on"
+ mailbox.force_spf = force_spf_value
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Set force_spf to {force_spf_value} on mailbox {mailbox_id} ({mailbox.email})",
)
Session.commit()
flash(

@@ -113,6 +103,11 @@ def mailbox_detail_route(mailbox_id):
if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
flash(f"{address} already added", "error")
else:
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Add authorized address {address} to mailbox {mailbox_id} ({mailbox.email})",
+ )
AuthorizedAddress.create(
user_id=current_user.id,
mailbox_id=mailbox.id,

@@ -133,6 +128,11 @@ def mailbox_detail_route(mailbox_id):
flash("Unknown error. Refresh the page", "warning")
else:
address = authorized_address.email
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Remove authorized address {address} from mailbox {mailbox_id} ({mailbox.email})",
+ )
AuthorizedAddress.delete(authorized_address_id)
Session.commit()
flash(f"{address} has been deleted", "success")

@@ -165,6 +165,11 @@ def mailbox_detail_route(mailbox_id):
except PGPException:
flash("Cannot add the public key, please verify it", "error")
else:
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Add PGP Key {mailbox.pgp_finger_print} to mailbox {mailbox_id} ({mailbox.email})",
+ )
Session.commit()
flash("Your PGP public key is saved successfully", "success")
return redirect(

@@ -172,6 +177,11 @@ def mailbox_detail_route(mailbox_id):
)
elif request.form.get("action") == "remove":
# Free user can decide to remove their added PGP key
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Remove PGP Key {mailbox.pgp_finger_print} from mailbox {mailbox_id} ({mailbox.email})",
+ )
mailbox.pgp_public_key = None
mailbox.pgp_finger_print = None
mailbox.disable_pgp = False

@@ -191,9 +201,19 @@ def mailbox_detail_route(mailbox_id):
)
else:
mailbox.disable_pgp = False
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Enabled PGP for mailbox {mailbox_id} ({mailbox.email})",
+ )
flash(f"PGP is enabled on {mailbox.email}", "info")
else:
mailbox.disable_pgp = True
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Disabled PGP for mailbox {mailbox_id} ({mailbox.email})",
+ )
flash(f"PGP is disabled on {mailbox.email}", "info")
Session.commit()

@@ -203,6 +223,11 @@ def mailbox_detail_route(mailbox_id):
elif request.form.get("form-name") == "generic-subject":
if request.form.get("action") == "save":
mailbox.generic_subject = request.form.get("generic-subject")
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Set generic subject for mailbox {mailbox_id} ({mailbox.email})",
+ )
Session.commit()
flash("Generic subject is enabled", "success")
return redirect(

@@ -210,6 +235,11 @@ def mailbox_detail_route(mailbox_id):
)
elif request.form.get("action") == "remove":
mailbox.generic_subject = None
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.UpdateMailbox,
+ message=f"Remove generic subject for mailbox {mailbox_id} ({mailbox.email})",
+ )
Session.commit()
flash("Generic subject is disabled", "success")
return redirect(

@@ -220,91 +250,57 @@ def mailbox_detail_route(mailbox_id):
return render_template("dashboard/mailbox_detail.html", **locals())
- def verify_mailbox_change(user, mailbox, new_email):
- s = TimestampSigner(MAILBOX_SECRET)
- mailbox_id_signed = s.sign(str(mailbox.id)).decode()
- verification_url = (
- f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
- )
- send_email(
- new_email,
- "Confirm mailbox change on SimpleLogin",
- render(
- "transactional/verify-mailbox-change.txt.jinja2",
- user=user,
- link=verification_url,
- mailbox_email=mailbox.email,
- mailbox_new_email=new_email,
- ),
- render(
- "transactional/verify-mailbox-change.html",
- user=user,
- link=verification_url,
- mailbox_email=mailbox.email,
- mailbox_new_email=new_email,
- ),
- )
@dashboard_bp.route(
"/mailbox/<int:mailbox_id>/cancel_email_change", methods=["GET", "POST"]
)
@login_required
def cancel_mailbox_change_route(mailbox_id):
- mailbox = Mailbox.get(mailbox_id)
- if not mailbox or mailbox.user_id != current_user.id:
- flash("You cannot see this page", "warning")
- return redirect(url_for("dashboard.index"))
- if mailbox.new_email:
- mailbox.new_email = None
- Session.commit()
+ try:
+ mailbox_utils.cancel_email_change(mailbox_id, current_user)
flash("Your mailbox change is cancelled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
- else:
- flash("You have no pending mailbox change", "warning")
- return redirect(
- url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
- )
+ except MailboxError as e:
+ flash(e.msg, "warning")
+ return redirect(url_for("dashboard.index"))
@dashboard_bp.route("/mailbox/confirm_change")
- def mailbox_confirm_change_route():
- s = TimestampSigner(MAILBOX_SECRET)
- signed_mailbox_id = request.args.get("mailbox_id")
- try:
- mailbox_id = int(s.unsign(signed_mailbox_id, max_age=900))
- except Exception:
- flash("Invalid link", "error")
- return redirect(url_for("dashboard.index"))
- else:
- mailbox = Mailbox.get(mailbox_id)
- # new_email can be None if user cancels change in the meantime
- if mailbox and mailbox.new_email:
- user = mailbox.user
- if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
- flash(f"{mailbox.new_email} is already used", "error")
- return redirect(
- url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
- )
- mailbox.email = mailbox.new_email
- mailbox.new_email = None
- # mark mailbox as verified if the change request is sent from an unverified mailbox
- mailbox.verified = True
- Session.commit()
- LOG.d("Mailbox change %s is verified", mailbox)
- flash(f"The {mailbox.email} is updated", "success")
+ @login_required
+ @limiter.limit("3/minute")
+ def mailbox_confirm_email_change_route():
+ mailbox_id = request.args.get("mailbox_id")
+ code = request.args.get("code")
+ if code:
+ try:
+ mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
+ flash("Successfully changed mailbox email", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
)
- else:
+ except mailbox_utils.MailboxError as e:
+ flash(f"Cannot verify mailbox: {e.msg}", "error")
+ return redirect(url_for("dashboard.mailbox_route"))
+ else:
+ s = TimestampSigner(MAILBOX_SECRET)
+ try:
+ mailbox_id = int(s.unsign(mailbox_id, max_age=900))
+ res = perform_mailbox_email_change(mailbox_id)
+ flash(res.message, res.message_category)
+ if res.error:
+ if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
+ return redirect(
+ url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
+ )
+ elif res.error == MailboxEmailChangeError.InvalidId:
+ return redirect(url_for("dashboard.index"))
+ else:
+ raise Exception("Unhandled MailboxEmailChangeError")
+ except Exception:
flash("Invalid link", "error")
return redirect(url_for("dashboard.index"))
+ flash("Successfully changed mailbox email", "success")
+ return redirect(url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id))
View File
@@ -43,7 +43,10 @@ def notification_route(notification_id):
def notifications_route():
page = 0
if request.args.get("page"):
- page = int(request.args.get("page"))
+ try:
+ page = int(request.args.get("page"))
+ except ValueError:
+ pass
notifications = (
Notification.filter_by(user_id=current_user.id)
View File
@@ -22,7 +22,7 @@ from app.models import (
PartnerUser,
PartnerSubscription,
)
- from app.proton.utils import get_proton_partner
+ from app.proton.proton_partner import get_proton_partner
@dashboard_bp.route("/pricing", methods=["GET", "POST"])
View File
@@ -14,7 +14,7 @@ from flask_wtf import FlaskForm
from flask_wtf.file import FileField
from wtforms import StringField, validators
- from app import s3
+ from app import s3, user_settings
from app.config import (
FIRST_ALIAS_DOMAIN,
ALIAS_RANDOM_SUFFIX_LENGTH,

@@ -31,19 +31,18 @@ from app.models import (
PlanEnum,
File,
EmailChange,
- CustomDomain,
AliasGeneratorEnum,
AliasSuffixEnum,
ManualSubscription,
SenderFormatEnum,
- SLDomain,
CoinbaseSubscription,
AppleSubscription,
PartnerUser,
PartnerSubscription,
UnsubscribeBehaviourEnum,
)
- from app.proton.utils import get_proton_partner
+ from app.proton.proton_partner import get_proton_partner
+ from app.proton.proton_unlink import can_unlink_proton_account
from app.utils import (
random_string,
CSRFValidationForm,

@@ -166,44 +165,22 @@ def setting():
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-random-alias-default-domain":
default_domain = request.form.get("random-alias-default-domain")
- if default_domain:
- sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
- if sl_domain:
- if sl_domain.premium_only and not current_user.is_premium():
- flash("You cannot use this domain", "error")
- return redirect(url_for("dashboard.setting"))
- current_user.default_alias_public_domain_id = sl_domain.id
- current_user.default_alias_custom_domain_id = None
- else:
- custom_domain = CustomDomain.get_by(domain=default_domain)
- if custom_domain:
- # sanity check
- if (
- custom_domain.user_id != current_user.id
- or not custom_domain.verified
- ):
- LOG.w(
- "%s cannot use domain %s", current_user, custom_domain
- )
- flash(f"Domain {default_domain} can't be used", "error")
- return redirect(request.url)
- else:
- current_user.default_alias_custom_domain_id = (
- custom_domain.id
- )
- current_user.default_alias_public_domain_id = None
- else:
- current_user.default_alias_custom_domain_id = None
- current_user.default_alias_public_domain_id = None
+ try:
+ user_settings.set_default_alias_domain(current_user, default_domain)
+ except user_settings.CannotSetAlias as e:
+ flash(e.msg, "error")
+ return redirect(url_for("dashboard.setting"))
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "random-alias-suffix":
- scheme = int(request.form.get("random-alias-suffix-generator"))
+ try:
+ scheme = int(request.form.get("random-alias-suffix-generator"))
+ except ValueError:
+ flash("Invalid value", "error")
+ return redirect(url_for("dashboard.setting"))
if AliasSuffixEnum.has_value(scheme):
current_user.random_alias_suffix = scheme
Session.commit()

@@ -347,4 +324,5 @@ def setting():
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
connect_with_proton=CONNECT_WITH_PROTON,
proton_linked_account=proton_linked_account,
+ can_unlink_proton_account=can_unlink_proton_account(current_user),
)
View File
@@ -11,6 +11,7 @@ from app.dashboard.base import dashboard_bp
from app.errors import SubdomainInTrashError
from app.log import LOG
from app.models import CustomDomain, Mailbox, SLDomain
+ from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
# Only lowercase letters, numbers, dashes (-) are currently supported
_SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"

@@ -102,6 +103,12 @@ def subdomain_route():
ownership_verified=True,
commit=True,
)
+ emit_user_audit_log(
+ user=current_user,
+ action=UserAuditLogAction.CreateCustomDomain,
+ message=f"Create subdomain {new_custom_domain.id} ({full_domain})",
+ commit=True,
+ )
except SubdomainInTrashError:
flash(
f"{full_domain} has been used before and cannot be reused",
View File
@@ -32,7 +32,9 @@ def unsubscribe(alias_id):
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
if request.method == "POST":
- alias_utils.change_alias_status(alias, False)
+ alias_utils.change_alias_status(
+ alias, enabled=False, message="Set enabled=False from unsubscribe request"
+ )
flash(f"Alias {alias.email} has been blocked", "success")
Session.commit()
View File
@@ -1,4 +1,5 @@
from io import BytesIO
+ from urllib.parse import urlparse
from flask import request, render_template, redirect, url_for, flash
from flask_login import current_user, login_required

@@ -11,6 +12,7 @@ from app.config import ADMIN_EMAIL
from app.db import Session
from app.developer.base import developer_bp
from app.email_utils import send_email
+ from app.image_validation import detect_image_format, ImageFormat
from app.log import LOG
from app.models import Client, RedirectUri, File, Referral
from app.utils import random_string

@@ -46,16 +48,25 @@ def client_detail(client_id):
approval_form.description.data = client.description
if action == "edit" and form.validate_on_submit():
+ parsed_url = urlparse(form.url.data)
+ if parsed_url.scheme != "https":
+ flash("Only https urls are allowed", "error")
+ return redirect(url_for("developer.index"))
client.name = form.name.data
client.home_url = form.url.data
if form.icon.data:
- # todo: remove current icon if any
- # todo: handle remove icon
+ icon_data = form.icon.data.read(10240)
+ if detect_image_format(icon_data) == ImageFormat.Unknown:
+ flash("Unknown file format", "warning")
+ return redirect(url_for("developer.index"))
+ if client.icon:
+ s3.delete(client.icon_id)
+ File.delete(client.icon)
file_path = random_string(30)
file = File.create(path=file_path, user_id=client.user_id)
- s3.upload_from_bytesio(file_path, BytesIO(form.icon.data.read()))
+ s3.upload_from_bytesio(file_path, BytesIO(icon_data))
Session.flush()
LOG.d("upload file %s to s3", file)
View File
@@ -1,4 +1,5 @@
"""List of clients"""
from flask import render_template
from flask_login import current_user, login_required
View File
@@ -1,3 +1,5 @@
+ from urllib.parse import urlparse
from flask import render_template, redirect, url_for, flash
from flask_login import current_user, login_required
from flask_wtf import FlaskForm

@@ -20,6 +22,10 @@ def new_client():
if form.validate_on_submit():
client = Client.create_new(form.name.data, current_user.id)
+ parsed_url = urlparse(form.url.data)
+ if parsed_url.scheme != "https":
+ flash("Only https urls are allowed", "error")
+ return redirect(url_for("developer.new_client"))
client.home_url = form.url.data
Session.commit()
View File
@@ -1,120 +1,134 @@
- from app import config
- from typing import Optional, List, Tuple
+ from abc import ABC, abstractmethod
+ from typing import List, Optional
import dns.resolver
+ from app.config import NAMESERVERS
- def _get_dns_resolver():
- my_resolver = dns.resolver.Resolver()
- my_resolver.nameservers = config.NAMESERVERS
- return my_resolver
- def get_ns(hostname) -> [str]:
- try:
- answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
- except Exception:
- return []
- return [a.to_text() for a in answers]
- def get_cname_record(hostname) -> Optional[str]:
- """Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
- try:
- answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
- except Exception:
- return None
- for a in answers:
- ret = a.to_text()
- return ret[:-1]
- return None
- def get_mx_domains(hostname) -> [(int, str)]:
- """return list of (priority, domain name) sorted by priority (lowest priority first)
- domain name ends with a "." at the end.
- """
- try:
- answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
- except Exception:
- return []
- ret = []
- for a in answers:
- record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
- parts = record.split(" ")
- ret.append((int(parts[0]), parts[1]))
- return sorted(ret, key=lambda prio_domain: prio_domain[0])
_include_spf = "include:"
- def get_spf_domain(hostname) -> [str]:
- """return all domains listed in *include:*"""
- try:
- answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
- except Exception:
- return []
- ret = []
- for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
- for record in a.strings:
- record = record.decode() # record is bytes
- if record.startswith("v=spf1"):
- for part in parts:
- if part.startswith(_include_spf):
- ret.append(part[part.find(_include_spf) + len(_include_spf) :])
- return ret
- def get_txt_record(hostname) -> [str]:
- try:
- answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
- except Exception:
- return []
- ret = []
- for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
- for record in a.strings:
- record = record.decode() # record is bytes
- ret.append(record)
- return ret
- def is_mx_equivalent(
- mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
- ) -> bool:
- """
- Compare mx_domains with ref_mx_domains to see if they are equivalent.
- mx_domains and ref_mx_domains are list of (priority, domain)
- The priority order is taken into account but not the priority number.
- For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
- """
- mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
- ref_mx_domains = sorted(
- ref_mx_domains, key=lambda priority_domain: priority_domain[0]
- )
- if len(mx_domains) < len(ref_mx_domains):
- return False
- for i in range(0, len(ref_mx_domains)):
- if mx_domains[i][1] != ref_mx_domains[i][1]:
- return False
- return True
+ class DNSClient(ABC):
+ @abstractmethod
+ def get_cname_record(self, hostname: str) -> Optional[str]:
+ pass
+ @abstractmethod
+ def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
+ pass
+ def get_spf_domain(self, hostname: str) -> List[str]:
+ """
+ return all domains listed in *include:*
+ """
+ try:
+ records = self.get_txt_record(hostname)
+ ret = []
+ for record in records:
+ if record.startswith("v=spf1"):
+ parts = record.split(" ")
+ for part in parts:
+ if part.startswith(_include_spf):
+ ret.append(
+ part[part.find(_include_spf) + len(_include_spf) :]
+ )
+ return ret
+ except Exception:
+ return []
+ @abstractmethod
+ def get_txt_record(self, hostname: str) -> List[str]:
+ pass
+ class NetworkDNSClient(DNSClient):
+ def __init__(self, nameservers: List[str]):
+ self._resolver = dns.resolver.Resolver()
+ self._resolver.nameservers = nameservers
+ def get_cname_record(self, hostname: str) -> Optional[str]:
+ """
+ Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end
+ """
+ try:
+ answers = self._resolver.resolve(hostname, "CNAME", search=True)
+ for a in answers:
+ ret = a.to_text()
+ return ret[:-1]
+ except Exception:
+ return None
+ def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
+ """
+ return list of (priority, domain name) sorted by priority (lowest priority first)
+ domain name ends with a "." at the end.
+ """
+ ret = {}
+ try:
+ answers = self._resolver.resolve(hostname, "MX", search=True)
+ for a in answers:
+ record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
+ parts = record.split(" ")
+ prio = int(parts[0])
+ if prio not in ret:
+ ret[prio] = []
+ ret[prio].append(parts[1])
+ except Exception:
+ pass
+ return ret
+ def get_txt_record(self, hostname: str) -> List[str]:
+ try:
+ answers = self._resolver.resolve(hostname, "TXT", search=False)
+ ret = []
+ for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
+ for record in a.strings:
+ ret.append(record.decode())
+ return ret
+ except Exception:
+ return []
+ class InMemoryDNSClient(DNSClient):
+ def __init__(self):
+ self.cname_records: dict[str, Optional[str]] = {}
+ self.mx_records: dict[int, dict[int, list[str]]] = {}
+ self.spf_records: dict[str, List[str]] = {}
+ self.txt_records: dict[str, List[str]] = {}
+ def set_cname_record(self, hostname: str, cname: str):
+ self.cname_records[hostname] = cname
+ def set_mx_records(self, hostname: str, mx_list: dict[int, list[str]]):
+ self.mx_records[hostname] = mx_list
+ def set_txt_record(self, hostname: str, txt_list: List[str]):
+ self.txt_records[hostname] = txt_list
+ def get_cname_record(self, hostname: str) -> Optional[str]:
+ return self.cname_records.get(hostname)
+ def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
+ return self.mx_records.get(hostname, {})
+ def get_txt_record(self, hostname: str) -> List[str]:
+ return self.txt_records.get(hostname, [])
+ global_dns_client: Optional[DNSClient] = None
+ def get_network_dns_client() -> DNSClient:
+ global global_dns_client
+ if global_dns_client is not None:
+ return global_dns_client
+ return NetworkDNSClient(NAMESERVERS)
+ def set_global_dns_client(dns_client: Optional[DNSClient]):
+ global global_dns_client
+ global_dns_client = dns_client
+ def get_mx_domains(hostname: str) -> dict[int, list[str]]:
+ return get_network_dns_client().get_mx_domains(hostname)
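With the DNSClient abstraction above, tests can swap the network resolver for the in-memory implementation. A sketch using only names defined in this file (the record values are made up):

# Sketch only: exercises InMemoryDNSClient via the module-level helpers above.
from app.dns_utils import InMemoryDNSClient, set_global_dns_client, get_mx_domains

def test_mx_lookup_with_fake_dns():
    fake = InMemoryDNSClient()
    fake.set_mx_records("example.org", {10: ["mx1.example.net."], 20: ["mx2.example.net."]})
    set_global_dns_client(fake)
    try:
        assert get_mx_domains("example.org") == {10: ["mx1.example.net."], 20: ["mx2.example.net."]}
    finally:
        set_global_dns_client(None)  # fall back to the real NetworkDNSClient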
View File
@@ -1,4 +1,5 @@
"""Email headers"""
MESSAGE_ID = "Message-ID"
IN_REPLY_TO = "In-Reply-To"
REFERENCES = "References"
View File
@@ -548,7 +548,9 @@ def can_create_directory_for_address(email_address: str) -> bool:
for domain in config.ALIAS_DOMAINS:
if email_address.endswith("@" + domain):
return True
+ LOG.i(
+ f"Cannot create address in directory for {email_address} since it does not belong to a valid directory domain"
+ )
return False

@@ -590,7 +592,7 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
from app.models import CustomDomain
- if CustomDomain.get_by(domain=domain, verified=True):
+ if CustomDomain.get_by(domain=domain, is_sl_subdomain=True, verified=True):
LOG.d("domain %s is a SimpleLogin custom domain", domain)
return False

@@ -655,7 +657,11 @@ def get_mx_domain_list(domain) -> [str]:
"""
priority_domains = get_mx_domains(domain)
- return [d[:-1] for _, d in priority_domains]
+ mx_domains = []
+ for prio in priority_domains:
+ for domain in priority_domains[prio]:
+ mx_domains.append(domain[:-1])
+ return mx_domains
def personal_email_already_used(email_address: str) -> bool:

@@ -1343,17 +1349,18 @@ def get_queue_id(msg: Message) -> Optional[str]:
received_header = str(msg[headers.RECEIVED])
if not received_header:
- return
+ return None
# received_header looks like 'from mail-wr1-x434.google.com (mail-wr1-x434.google.com [IPv6:2a00:1450:4864:20::434])\r\n\t(using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits))\r\n\t(No client certificate requested)\r\n\tby mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n\tfor <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)'
- search_result = re.search("with ESMTPS id [0-9a-zA-Z]{1,}", received_header)
- if not search_result:
- return
- # the "with ESMTPS id 4FxQmw1DXdz2vK2" part
- with_esmtps = received_header[search_result.start() : search_result.end()]
- return with_esmtps[len("with ESMTPS id ") :]
+ search_result = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
+ if search_result:
+ return search_result.group(1)
+ search_result = re.search(
+ r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", received_header
+ )
+ if search_result:
+ return search_result.group(1)
+ return None
def should_ignore_bounce(mail_from: str) -> bool:
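The broadened get_queue_id regex now captures the queue id for ESMTP, ESMTPA, ESMTPS and plain SMTP, with a Postfix-style fallback. A quick check of the first pattern against the example header quoted in the code comment:

# Sketch only: demonstrates the first regex from the new get_queue_id.
import re

received_header = (
    "by mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2"
    " for <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)"
)
match = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
assert match and match.group(1) == "4FxQmw1DXdz2vK2"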
View File
@@ -1,10 +1,14 @@
from abc import ABC, abstractmethod
+ import newrelic.agent
from app import config
from app.db import Session
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
+ from app.log import LOG
from app.models import User, PartnerUser, SyncEvent
- from app.proton.utils import get_proton_partner
+ from app.proton.proton_partner import get_proton_partner
from typing import Optional
NOTIFICATION_CHANNEL = "simplelogin_sync_events"

@@ -26,22 +30,43 @@ class PostgresDispatcher(Dispatcher):
return PostgresDispatcher()
+ class GlobalDispatcher:
+ __dispatcher: Optional[Dispatcher] = None
+ @staticmethod
+ def get_dispatcher() -> Dispatcher:
+ if not GlobalDispatcher.__dispatcher:
+ GlobalDispatcher.__dispatcher = PostgresDispatcher.get()
+ return GlobalDispatcher.__dispatcher
+ @staticmethod
+ def set_dispatcher(dispatcher: Optional[Dispatcher]):
+ GlobalDispatcher.__dispatcher = dispatcher
class EventDispatcher:
@staticmethod
def send_event(
user: User,
content: event_pb2.EventContent,
- dispatcher: Dispatcher = PostgresDispatcher.get(),
+ dispatcher: Optional[Dispatcher] = None,
skip_if_webhook_missing: bool = True,
):
+ if dispatcher is None:
+ dispatcher = GlobalDispatcher.get_dispatcher()
if config.EVENT_WEBHOOK_DISABLE:
+ LOG.i("Not sending events because webhook is disabled")
return
if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
+ LOG.i(
+ "Not sending events because webhook is not configured and allowed to be empty"
+ )
return
partner_user = EventDispatcher.__partner_user(user.id)
if not partner_user:
+ LOG.i(f"Not sending events because there's no partner user for user {user}")
return
event = event_pb2.Event(

@@ -54,6 +79,10 @@ class EventDispatcher:
serialized = event.SerializeToString()
dispatcher.send(serialized)
+ event_type = content.WhichOneof("content")
+ newrelic.agent.record_custom_event("EventStoredToDb", {"type": event_type})
+ LOG.i("Sent event to the dispatcher")
@staticmethod
def __partner_user(user_id: int) -> Optional[PartnerUser]:
# Check if the current user has a partner_id
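GlobalDispatcher makes the event sink swappable, so tests can capture serialized events instead of writing SyncEvent rows. A sketch against the interface shown above; the Dispatcher base class and its send() hook are defined earlier in this module:

# Sketch only: an in-memory dispatcher for tests.
from app.events.event_dispatcher import Dispatcher, GlobalDispatcher

class MemoryDispatcher(Dispatcher):
    def __init__(self):
        self.events: list[bytes] = []

    def send(self, event: bytes):
        self.events.append(event)

capture = MemoryDispatcher()
GlobalDispatcher.set_dispatcher(capture)
# ... run code that calls EventDispatcher.send_event(...) ...
GlobalDispatcher.set_dispatcher(None)  # restore the default PostgresDispatcher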
View File
@@ -24,7 +24,7 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3') DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\":\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\x12\x10\n\x08lifetime\x18\x02 \x01(\x08\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x0e\n\x0cUserUnlinked\"\xce\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x12\x39\n\ruser_unlinked\x18\x07 \x01(\x0b\x32 .simplelogin_events.UserUnlinkedH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)

@@ -32,19 +32,21 @@ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
DESCRIPTOR._loaded_options = None
_globals['_USERPLANCHANGED']._serialized_start=35
- _globals['_USERPLANCHANGED']._serialized_end=75
- _globals['_USERDELETED']._serialized_start=77
- _globals['_USERDELETED']._serialized_end=90
- _globals['_ALIASCREATED']._serialized_start=92
- _globals['_ALIASCREATED']._serialized_end=182
- _globals['_ALIASSTATUSCHANGED']._serialized_start=184
- _globals['_ALIASSTATUSCHANGED']._serialized_end=260
- _globals['_ALIASDELETED']._serialized_start=262
- _globals['_ALIASDELETED']._serialized_end=315
- _globals['_ALIASCREATEDLIST']._serialized_start=317
- _globals['_ALIASCREATEDLIST']._serialized_end=385
- _globals['_EVENTCONTENT']._serialized_start=388
- _globals['_EVENTCONTENT']._serialized_end=791
- _globals['_EVENT']._serialized_start=793
- _globals['_EVENT']._serialized_end=914
+ _globals['_USERPLANCHANGED']._serialized_end=93
+ _globals['_USERDELETED']._serialized_start=95
+ _globals['_USERDELETED']._serialized_end=108
+ _globals['_ALIASCREATED']._serialized_start=110
+ _globals['_ALIASCREATED']._serialized_end=202
+ _globals['_ALIASSTATUSCHANGED']._serialized_start=204
+ _globals['_ALIASSTATUSCHANGED']._serialized_end=288
+ _globals['_ALIASDELETED']._serialized_start=290
+ _globals['_ALIASDELETED']._serialized_end=331
+ _globals['_ALIASCREATEDLIST']._serialized_start=333
+ _globals['_ALIASCREATEDLIST']._serialized_end=401
+ _globals['_USERUNLINKED']._serialized_start=403
+ _globals['_USERUNLINKED']._serialized_end=417
+ _globals['_EVENTCONTENT']._serialized_start=420
+ _globals['_EVENTCONTENT']._serialized_end=882
+ _globals['_EVENT']._serialized_start=884
+ _globals['_EVENT']._serialized_end=1005
# @@protoc_insertion_point(module_scope)
View File
@@ -6,44 +6,50 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
DESCRIPTOR: _descriptor.FileDescriptor
class UserPlanChanged(_message.Message):
- __slots__ = ("plan_end_time",)
+ __slots__ = ("plan_end_time", "lifetime")
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
+ LIFETIME_FIELD_NUMBER: _ClassVar[int]
plan_end_time: int
- def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
+ lifetime: bool
+ def __init__(self, plan_end_time: _Optional[int] = ..., lifetime: bool = ...) -> None: ...
class UserDeleted(_message.Message):
__slots__ = ()
def __init__(self) -> None: ...
class AliasCreated(_message.Message):
- __slots__ = ("alias_id", "alias_email", "alias_note", "enabled")
- ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
- ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
- ALIAS_NOTE_FIELD_NUMBER: _ClassVar[int]
+ __slots__ = ("id", "email", "note", "enabled", "created_at")
+ ID_FIELD_NUMBER: _ClassVar[int]
+ EMAIL_FIELD_NUMBER: _ClassVar[int]
+ NOTE_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
- alias_id: int
- alias_email: str
- alias_note: str
+ CREATED_AT_FIELD_NUMBER: _ClassVar[int]
+ id: int
+ email: str
+ note: str
enabled: bool
- def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...
+ created_at: int
+ def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., note: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
class AliasStatusChanged(_message.Message):
- __slots__ = ("alias_id", "alias_email", "enabled")
- ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
- ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
+ __slots__ = ("id", "email", "enabled", "created_at")
+ ID_FIELD_NUMBER: _ClassVar[int]
+ EMAIL_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
- alias_id: int
- alias_email: str
+ CREATED_AT_FIELD_NUMBER: _ClassVar[int]
+ id: int
+ email: str
enabled: bool
- def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., enabled: bool = ...) -> None: ...
+ created_at: int
+ def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
class AliasDeleted(_message.Message):
- __slots__ = ("alias_id", "alias_email")
- ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
- ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
- alias_id: int
- alias_email: str
- def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...
+ __slots__ = ("id", "email")
+ ID_FIELD_NUMBER: _ClassVar[int]
+ EMAIL_FIELD_NUMBER: _ClassVar[int]
+ id: int
+ email: str
+ def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ...) -> None: ...
class AliasCreatedList(_message.Message):
__slots__ = ("events",)

@@ -51,21 +57,27 @@ class AliasCreatedList(_message.Message):
events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
+ class UserUnlinked(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
class EventContent(_message.Message):
- __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
+ __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list", "user_unlinked")
USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
USER_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
+ USER_UNLINKED_FIELD_NUMBER: _ClassVar[int]
user_plan_change: UserPlanChanged user_plan_change: UserPlanChanged
user_deleted: UserDeleted user_deleted: UserDeleted
alias_created: AliasCreated alias_created: AliasCreated
alias_status_change: AliasStatusChanged alias_status_change: AliasStatusChanged
alias_deleted: AliasDeleted alias_deleted: AliasDeleted
alias_create_list: AliasCreatedList alias_create_list: AliasCreatedList
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ... user_unlinked: UserUnlinked
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ..., user_unlinked: _Optional[_Union[UserUnlinked, _Mapping]] = ...) -> None: ...
class Event(_message.Message): class Event(_message.Message):
__slots__ = ("user_id", "external_user_id", "partner_id", "content") __slots__ = ("user_id", "external_user_id", "partner_id", "content")


@ -33,8 +33,11 @@ from app.models import (
SLDomain, SLDomain,
Hibp, Hibp,
AliasHibp, AliasHibp,
PartnerUser,
PartnerSubscription,
) )
from app.pgp_utils import load_public_key from app.pgp_utils import load_public_key
from app.proton.proton_partner import get_proton_partner
def fake_data(): def fake_data():
@ -87,7 +90,7 @@ def fake_data():
user_id=user.id, user_id=user.id,
alias_id=alias.id, alias_id=alias.id,
website_email="hey@google.com", website_email="hey@google.com",
reply_email="rep@sl.local", reply_email="rep@sl.lan",
commit=True, commit=True,
) )
EmailLog.create( EmailLog.create(
@ -163,7 +166,7 @@ def fake_data():
# user_id=user.id, # user_id=user.id,
# alias_id=a.id, # alias_id=a.id,
# website_email=f"contact{i}@example.com", # website_email=f"contact{i}@example.com",
# reply_email=f"rep{i}@sl.local", # reply_email=f"rep{i}@sl.lan",
# ) # )
# Session.commit() # Session.commit()
# for _ in range(3): # for _ in range(3):
@ -269,3 +272,27 @@ def fake_data():
CustomDomain.create( CustomDomain.create(
user_id=user.id, domain="old.com", verified=True, ownership_verified=True user_id=user.id, domain="old.com", verified=True, ownership_verified=True
) )
# Create a user linked to the Proton partner, with an active partner subscription
proton_partner = get_proton_partner()
user = User.create(
email="test@proton.me",
name="Proton test",
password="password",
activated=True,
is_admin=False,
intro_shown=True,
from_partner=True,
flush=True,
)
pu = PartnerUser.create(
user_id=user.id,
partner_id=proton_partner.id,
partner_email="test@proton.me",
external_user_id="DUMMY",
flush=True,
)
PartnerSubscription.create(
partner_user_id=pu.id, end_at=arrow.now().shift(years=1, days=1)
)
Session.commit()


@ -2,8 +2,8 @@ import urllib
from email.header import Header from email.header import Header
from email.message import Message from email.message import Message
from app.email import headers
from app import config from app import config
from app.email import headers
from app.email_utils import add_or_replace_header, delete_header from app.email_utils import add_or_replace_header, delete_header
from app.handler.unsubscribe_encoder import ( from app.handler.unsubscribe_encoder import (
UnsubscribeEncoder, UnsubscribeEncoder,
@ -46,7 +46,11 @@ class UnsubscribeGenerator:
if start == -1 or end == -1 or start >= end: if start == -1 or end == -1 or start >= end:
continue continue
method = raw_method[start + 1 : end] method = raw_method[start + 1 : end]
url_data = urllib.parse.urlparse(method) try:
url_data = urllib.parse.urlparse(method)
except ValueError:
LOG.debug(f"Unsub has invalid method {method}. Ignoring.")
continue
if url_data.scheme == "mailto": if url_data.scheme == "mailto":
if url_data.path == config.UNSUBSCRIBER: if url_data.path == config.UNSUBSCRIBER:
LOG.debug( LOG.debug(
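The new try/except is needed because urllib.parse.urlparse raises ValueError on some malformed inputs; a minimal reproduction of the kind of value the guard now skips:

import urllib.parse

try:
    urllib.parse.urlparse("http://[::1")  # unbalanced IPv6 bracket
except ValueError as e:
    print(f"invalid unsubscribe method rejected: {e}")  # e.g. "Invalid IPv6 URL"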


@ -103,7 +103,9 @@ class UnsubscribeHandler:
): ):
return status.E509 return status.E509
LOG.i(f"User disabled alias {alias} via unsubscribe header") LOG.i(f"User disabled alias {alias} via unsubscribe header")
alias_utils.change_alias_status(alias, enabled=False) alias_utils.change_alias_status(
alias, enabled=False, message="Set enabled=False via unsubscribe header"
)
Session.commit() Session.commit()
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}" enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
for mailbox in alias.mailboxes: for mailbox in alias.mailboxes:


@ -1,3 +1,5 @@
import newrelic.agent
from app.events.event_dispatcher import EventDispatcher, Dispatcher from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
from app.log import LOG from app.log import LOG
@ -12,6 +14,7 @@ def send_alias_creation_events_for_user(
return return
chunk_size = min(chunk_size, 50) chunk_size = min(chunk_size, 50)
event_list = [] event_list = []
LOG.i("Sending alias create events for user {user}")
for alias in ( for alias in (
Alias.yield_per_query(chunk_size) Alias.yield_per_query(chunk_size)
.filter_by(user_id=user.id) .filter_by(user_id=user.id)
@ -19,22 +22,31 @@ def send_alias_creation_events_for_user(
): ):
event_list.append( event_list.append(
AliasCreated( AliasCreated(
alias_id=alias.id, id=alias.id,
alias_email=alias.email, email=alias.email,
alias_note=alias.note, note=alias.note,
enabled=alias.enabled, enabled=alias.enabled,
created_at=int(alias.created_at.timestamp),
) )
) )
if len(event_list) >= chunk_size: if len(event_list) >= chunk_size:
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
EventDispatcher.send_event( EventDispatcher.send_event(
user, user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)), EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher, dispatcher=dispatcher,
) )
newrelic.agent.record_custom_metric(
"Custom/event_alias_created_event", len(event_list)
)
event_list = [] event_list = []
if len(event_list) > 0: if len(event_list) > 0:
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
EventDispatcher.send_event( EventDispatcher.send_event(
user, user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)), EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher, dispatcher=dispatcher,
) )
newrelic.agent.record_custom_metric(
"Custom/event_alias_created_event", len(event_list)
)


@ -12,6 +12,7 @@ import arrow
import sqlalchemy import sqlalchemy
from app import config from app import config
from app.constants import JobType
from app.db import Session from app.db import Session
from app.email import headers from app.email import headers
from app.email_utils import ( from app.email_utils import (
@ -174,7 +175,7 @@ class ExportUserDataJob:
jobs_in_db = ( jobs_in_db = (
Session.query(Job) Session.query(Job)
.filter( .filter(
Job.name == config.JOB_SEND_USER_REPORT, Job.name == JobType.SEND_USER_REPORT.value,
Job.payload.op("->")("user_id").cast(sqlalchemy.TEXT) Job.payload.op("->")("user_id").cast(sqlalchemy.TEXT)
== str(self._user.id), == str(self._user.id),
Job.taken.is_(False), Job.taken.is_(False),
@ -184,7 +185,7 @@ class ExportUserDataJob:
if jobs_in_db > 0: if jobs_in_db > 0:
return None return None
return Job.create( return Job.create(
name=config.JOB_SEND_USER_REPORT, name=JobType.SEND_USER_REPORT.value,
payload={"user_id": self._user.id}, payload={"user_id": self._user.id},
run_at=arrow.now(), run_at=arrow.now(),
commit=True, commit=True,


@ -0,0 +1,72 @@
from __future__ import annotations
import base64
from typing import Optional
import arrow
from app.constants import JobType
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.events.generated.event_pb2 import EventContent
from app.models import (
User,
Job,
PartnerUser,
)
from app.proton.proton_partner import get_proton_partner
from events.event_sink import EventSink
class SendEventToWebhookJob:
def __init__(self, user: User, event: EventContent):
self._user: User = user
self._event: EventContent = event
def run(self, sink: EventSink) -> bool:
# Check if the current user has a partner_id
try:
proton_partner_id = get_proton_partner().id
except ProtonPartnerNotSetUp:
return False
# It has. Retrieve the information for the PartnerUser
partner_user = PartnerUser.get_by(
user_id=self._user.id, partner_id=proton_partner_id
)
if partner_user is None:
return True
event = event_pb2.Event(
user_id=self._user.id,
external_user_id=partner_user.external_user_id,
partner_id=partner_user.partner_id,
content=self._event,
)
serialized = event.SerializeToString()
return sink.send_data_to_webhook(serialized)
@staticmethod
def create_from_job(job: Job) -> Optional[SendEventToWebhookJob]:
user = User.get(job.payload["user_id"])
if not user:
return None
event_data = base64.b64decode(job.payload["event"])
event = event_pb2.EventContent()
event.ParseFromString(event_data)
return SendEventToWebhookJob(user=user, event=event)
def store_job_in_db(
self, run_at: Optional[arrow.Arrow], commit: bool = True
) -> Job:
stub = self._event.SerializeToString()
return Job.create(
name=JobType.SEND_EVENT_TO_WEBHOOK.value,
payload={
"user_id": self._user.id,
"event": base64.b64encode(stub).decode("utf-8"),
},
run_at=run_at if run_at is not None else arrow.now(),
commit=commit,
)
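A hedged usage sketch of the new job class above (user is an existing app.models.User and sink an EventSink implementation, both assumed to be available in scope; UserUnlinked is just an illustrative payload):

from app.events.generated.event_pb2 import EventContent, UserUnlinked

job = SendEventToWebhookJob(user=user, event=EventContent(user_unlinked=UserUnlinked()))
job.store_job_in_db(run_at=None)  # queue it as a JobType.SEND_EVENT_TO_WEBHOOK job
delivered = job.run(sink)         # or deliver immediately; True on success or when no partner link exists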


@ -10,7 +10,7 @@ from app.config import (
# this format allows clickable link to code source in PyCharm # this format allows clickable link to code source in PyCharm
_log_format = ( _log_format = (
"%(asctime)s - %(name)s - %(levelname)s - %(process)d - " "%(asctime)s - %(name)s - %(levelname)s - %(process)d - %(request_id)s"
'"%(pathname)s:%(lineno)d" - %(funcName)s() - %(message_id)s - %(message)s' '"%(pathname)s:%(lineno)d" - %(funcName)s() - %(message_id)s - %(message)s'
) )
_log_formatter = logging.Formatter(_log_format) _log_formatter = logging.Formatter(_log_format)
@ -37,6 +37,21 @@ class EmailHandlerFilter(logging.Filter):
return _MESSAGE_ID return _MESSAGE_ID
class RequestIdFilter(logging.Filter):
"""automatically add request-id to keep track of a request"""
def filter(self, record):
from flask import g, has_request_context
request_id = ""
if has_request_context() and hasattr(g, "request_id"):
ctx_request_id = getattr(g, "request_id")
if ctx_request_id:
request_id = f"{ctx_request_id} - "
record.request_id = request_id
return True
def _get_console_handler(): def _get_console_handler():
console_handler = logging.StreamHandler(sys.stdout) console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(_log_formatter) console_handler.setFormatter(_log_formatter)
@ -54,6 +69,7 @@ def _get_logger(name) -> logging.Logger:
logger.addHandler(_get_console_handler()) logger.addHandler(_get_console_handler())
logger.addFilter(EmailHandlerFilter()) logger.addFilter(EmailHandlerFilter())
logger.addFilter(RequestIdFilter())
# no propagation to avoid propagating to root logger # no propagation to avoid propagating to root logger
logger.propagate = False logger.propagate = False
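RequestIdFilter only reads g.request_id; a sketch of the counterpart that would populate it per request (an assumption, the real hook may live elsewhere in the app):

import uuid

from flask import Flask, g

app = Flask(__name__)


@app.before_request
def set_request_id():
    # make every log line of this request traceable via %(request_id)s
    g.request_id = str(uuid.uuid4())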

app/app/mailbox_utils.py (new file, 510 lines)

@ -0,0 +1,510 @@
import dataclasses
import secrets
from enum import Enum
from typing import Optional
import arrow
from sqlalchemy.exc import IntegrityError
from app import config
from app.constants import JobType
from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
send_email,
render,
get_email_domain_part,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import User, Mailbox, Job, MailboxActivation, Alias
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import canonicalize_email, sanitize_email
@dataclasses.dataclass
class CreateMailboxOutput:
mailbox: Mailbox
activation: Optional[MailboxActivation]
class MailboxError(Exception):
def __init__(self, msg: str):
self.msg = msg
class OnlyPaidError(MailboxError):
def __init__(self):
self.msg = "Only available for paid plans"
class CannotVerifyError(MailboxError):
def __init__(self, msg: str, deleted_activation_code: bool = False):
self.msg = msg
self.deleted_activation_code = deleted_activation_code
MAX_ACTIVATION_TRIES = 3
def create_mailbox(
user: User,
email: str,
verified: bool = False,
send_email: bool = True,
use_digit_codes: bool = False,
send_link: bool = True,
) -> CreateMailboxOutput:
email = sanitize_email(email)
if not user.is_premium():
LOG.i(
f"User {user} has tried to create mailbox with {email} but is not premium"
)
raise OnlyPaidError()
check_email_for_mailbox(email, user)
new_mailbox: Mailbox = Mailbox.create(
email=email, user_id=user.id, verified=verified, commit=True
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.CreateMailbox,
message=f"Create mailbox {new_mailbox.id} ({new_mailbox.email}). Verified={verified}",
commit=True,
)
if verified:
LOG.i(f"User {user} as created a pre-verified mailbox with {email}")
return CreateMailboxOutput(mailbox=new_mailbox, activation=None)
LOG.i(f"User {user} has created mailbox with {email}")
activation = generate_activation_code(new_mailbox, use_digit_code=use_digit_codes)
output = CreateMailboxOutput(mailbox=new_mailbox, activation=activation)
if not send_email:
LOG.i(f"Skipping sending validation email for mailbox {new_mailbox}")
return output
send_verification_email(
user,
new_mailbox,
activation=activation,
send_link=send_link,
)
return output
def check_email_for_mailbox(email, user):
if not is_valid_email(email):
LOG.i(
f"User {user} has tried to create mailbox with {email} but is not valid email"
)
raise MailboxError("Invalid email")
elif mailbox_already_used(email, user):
LOG.i(
f"User {user} has tried to create mailbox with {email} but email is already used"
)
raise MailboxError("Email already used")
elif not email_can_be_used_as_mailbox(email):
LOG.i(
f"User {user} has tried to create mailbox with {email} but email is invalid"
)
raise MailboxError("Invalid email")
def delete_mailbox(
user: User,
mailbox_id: int,
transfer_mailbox_id: Optional[int],
send_mail: bool = True,
) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
LOG.i(
f"User {user} has tried to delete another user's mailbox with {mailbox_id}"
)
raise MailboxError("Invalid mailbox")
if mailbox.id == user.default_mailbox_id:
LOG.i(f"User {user} has tried to delete the default mailbox")
raise MailboxError("Cannot delete your default mailbox")
if transfer_mailbox_id and transfer_mailbox_id > 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
if not transfer_mailbox or transfer_mailbox.user_id != user.id:
LOG.i(
f"User {user} has tried to transfer to a mailbox owned by another user"
)
raise MailboxError("You must transfer the aliases to a mailbox you own")
if transfer_mailbox.id == mailbox.id:
LOG.i(
f"User {user} has tried to transfer to the same mailbox he is deleting"
)
raise MailboxError(
"You can not transfer the aliases to the mailbox you want to delete"
)
if not transfer_mailbox.verified:
LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
raise MailboxError("Your new mailbox is not verified")
# Schedule delete account job
LOG.i(
f"User {user} has scheduled delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
)
Job.create(
name=JobType.DELETE_MAILBOX.value,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id
if transfer_mailbox_id and transfer_mailbox_id > 0
else None,
"send_mail": send_mail,
},
run_at=arrow.now(),
commit=True,
)
return mailbox
def clear_activation_codes_for_mailbox(mailbox: Mailbox):
Session.query(MailboxActivation).filter(
MailboxActivation.mailbox_id == mailbox.id
).delete()
Session.commit()
def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
)
raise MailboxError("Invalid mailbox")
if mailbox.user_id != user.id:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
)
raise MailboxError("Invalid mailbox")
if mailbox.verified and not mailbox.new_email:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
)
clear_activation_codes_for_mailbox(mailbox)
return mailbox
activation = (
MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
.order_by(MailboxActivation.created_at.desc())
.first()
)
if not activation:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because there is no activation"
)
raise MailboxError("Invalid code")
if activation.tries >= MAX_ACTIVATION_TRIES:
LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
clear_activation_codes_for_mailbox(mailbox)
raise CannotVerifyError(
"Invalid activation code. Please request another code.",
deleted_activation_code=True,
)
if activation.created_at < arrow.now().shift(minutes=-15):
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
)
clear_activation_codes_for_mailbox(mailbox)
raise CannotVerifyError("Invalid activation code. Please request another code.")
if code != activation.code:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because code does not match"
)
activation.tries = activation.tries + 1
Session.commit()
raise CannotVerifyError("Invalid activation code")
if mailbox.new_email:
LOG.i(
f"User {user} has verified mailbox email change from {mailbox.email} to {mailbox.new_email}"
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
)
mailbox.email = mailbox.new_email
mailbox.new_email = None
mailbox.verified = True
elif not mailbox.verified:
LOG.i(f"User {user} has verified mailbox {mailbox_id}")
mailbox.verified = True
emit_user_audit_log(
user=user,
action=UserAuditLogAction.VerifyMailbox,
message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
)
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
raise MailboxError("That address is already in use")
else:
LOG.i(
"User {user} alread has mailbox {mailbox} verified and no pending email change"
)
clear_activation_codes_for_mailbox(mailbox)
return mailbox
def generate_activation_code(
mailbox: Mailbox, use_digit_code: bool = False
) -> MailboxActivation:
clear_activation_codes_for_mailbox(mailbox)
if use_digit_code:
if config.MAILBOX_VERIFICATION_OVERRIDE_CODE:
code = config.MAILBOX_VERIFICATION_OVERRIDE_CODE
else:
code = "{:06d}".format(secrets.randbelow(1000000))[:6]
else:
code = secrets.token_urlsafe(16)
return MailboxActivation.create(
mailbox_id=mailbox.id,
code=code,
tries=0,
commit=True,
)
def send_verification_email(
user: User,
mailbox: Mailbox,
activation: MailboxActivation,
send_link: bool = True,
):
LOG.i(
f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
)
if send_link:
verification_url = (
config.URL
+ "/dashboard/mailbox_verify"
+ f"?mailbox_id={mailbox.id}&code={activation.code}"
)
else:
verification_url = None
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
code=activation.code,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
code=activation.code,
link=verification_url,
mailbox_email=mailbox.email,
),
)
def send_change_email(user: User, mailbox: Mailbox, activation: MailboxActivation):
verification_url = f"{config.URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox.id}&code={activation.code}"
send_email(
mailbox.new_email,
"Confirm mailbox change on SimpleLogin",
render(
"transactional/verify-mailbox-change.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
mailbox_new_email=mailbox.new_email,
),
render(
"transactional/verify-mailbox-change.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
mailbox_new_email=mailbox.new_email,
),
)
def request_mailbox_email_change(
user: User,
mailbox: Mailbox,
new_email: str,
email_ownership_verified: bool = False,
send_email: bool = True,
use_digit_codes: bool = False,
) -> CreateMailboxOutput:
new_email = sanitize_email(new_email)
if new_email == mailbox.email:
raise MailboxError("Same email")
check_email_for_mailbox(new_email, user)
if email_ownership_verified:
mailbox.email = new_email
mailbox.new_email = None
mailbox.verified = True
else:
mailbox.new_email = new_email
emit_user_audit_log(
user=user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Updated mailbox {mailbox.id} email ({new_email}) pre-verified({email_ownership_verified}",
)
try:
Session.commit()
except IntegrityError:
LOG.i(f"This email {new_email} is already pending for some mailbox")
Session.rollback()
raise MailboxError("Email already in use")
if email_ownership_verified:
LOG.i(f"User {user} as created a pre-verified mailbox with {new_email}")
return CreateMailboxOutput(mailbox=mailbox, activation=None)
LOG.i(f"User {user} has updated mailbox email with {new_email}")
activation = generate_activation_code(mailbox, use_digit_code=use_digit_codes)
output = CreateMailboxOutput(mailbox=mailbox, activation=activation)
if not send_email:
LOG.i(f"Skipping sending validation email for mailbox {mailbox}")
return output
send_change_email(
user,
mailbox,
activation=activation,
)
return output
class MailboxEmailChangeError(Enum):
InvalidId = 1
EmailAlreadyUsed = 2
@dataclasses.dataclass
class MailboxEmailChangeResult:
error: Optional[MailboxEmailChangeError]
message: str
message_category: str
def perform_mailbox_email_change(mailbox_id: int) -> MailboxEmailChangeResult:
mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
# new_email can be None if user cancels change in the meantime
if mailbox and mailbox.new_email:
user = mailbox.user
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
return MailboxEmailChangeResult(
error=MailboxEmailChangeError.EmailAlreadyUsed,
message=f"{mailbox.new_email} is already used",
message_category="error",
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
)
mailbox.email = mailbox.new_email
mailbox.new_email = None
# mark mailbox as verified if the change request is sent from an unverified mailbox
mailbox.verified = True
Session.commit()
LOG.d("Mailbox change %s is verified", mailbox)
return MailboxEmailChangeResult(
error=None,
message=f"The {mailbox.email} is updated",
message_category="success",
)
else:
return MailboxEmailChangeResult(
error=MailboxEmailChangeError.InvalidId,
message="Invalid link",
message_category="error",
)
def cancel_email_change(mailbox_id: int, user: User):
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
LOG.i(
f"User {user} has tried to cancel a mailbox an unknown mailbox {mailbox_id}"
)
raise MailboxError("Invalid mailbox")
if mailbox.user.id != user.id:
LOG.i(
f"User {user} has tried to cancel a mailbox {mailbox} owned by another user"
)
raise MailboxError("Invalid mailbox")
mailbox.new_email = None
LOG.i(f"User {mailbox.user} has cancelled mailbox email change")
clear_activation_codes_for_mailbox(mailbox)
def __get_alias_mailbox_from_email(
email_address: str, alias: Alias
) -> Optional[Mailbox]:
for mailbox in alias.mailboxes:
if mailbox.email == email_address:
return mailbox
for authorized_address in mailbox.authorized_addresses:
if authorized_address.email == email_address:
LOG.d(
"Found an authorized address for %s %s %s",
alias,
mailbox,
authorized_address,
)
return mailbox
return None
def __get_alias_mailbox_from_email_or_canonical_email(
email_address: str, alias: Alias
) -> Optional[Mailbox]:
# We need to first check for the uncanonicalized version because we still have users in the db with the
# email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
mbox = __get_alias_mailbox_from_email(email_address, alias)
if mbox is not None:
return mbox
canonical_email = canonicalize_email(email_address)
if canonical_email != email_address:
return __get_alias_mailbox_from_email(canonical_email, alias)
return None
def get_mailbox_for_reply_phase(
envelope_mail_from: str, header_mail_from: str, alias
) -> Optional[Mailbox]:
"""return the corresponding mailbox given the mail_from and alias
Usually the mail_from=mailbox.email but it can also be one of the authorized address
"""
mbox = __get_alias_mailbox_from_email_or_canonical_email(envelope_mail_from, alias)
if mbox is not None:
return mbox
if not header_mail_from:
return None
envelope_from_domain = get_email_domain_part(envelope_mail_from)
header_from_domain = get_email_domain_part(header_mail_from)
if envelope_from_domain != header_from_domain:
return None
# For services that use VERP sending (envelope from has encoded data to account for bounces)
# if the domain is the same in the header from as the envelope from we can use the header from
return __get_alias_mailbox_from_email_or_canonical_email(header_mail_from, alias)
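A hedged usage sketch of the new helpers (assuming an existing premium user object is in scope); failures are raised as MailboxError subclasses instead of being flashed to the UI:

try:
    output = create_mailbox(
        user=user,
        email="backup@example.com",
        use_digit_codes=True,  # 6-digit code instead of a url-safe token
        send_link=False,       # verification email contains the code only
    )
    print(f"mailbox {output.mailbox.id} created, code {output.activation.code}")
except OnlyPaidError:
    print("mailboxes are only available on paid plans")
except MailboxError as e:
    print(f"cannot create mailbox: {e.msg}")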


@ -24,14 +24,15 @@ from sqlalchemy import text, desc, CheckConstraint, Index, Column
from sqlalchemy.dialects.postgresql import TSVECTOR from sqlalchemy.dialects.postgresql import TSVECTOR
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import deferred from sqlalchemy.orm import deferred
from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.sql import and_ from sqlalchemy.sql import and_
from sqlalchemy_utils import ArrowType from sqlalchemy_utils import ArrowType
from app import config, rate_limiter from app import config, rate_limiter
from app import s3 from app import s3
from app.constants import JobType
from app.db import Session from app.db import Session
from app.dns_utils import get_mx_domains from app.dns_utils import get_mx_domains
from app.errors import ( from app.errors import (
AliasInTrashError, AliasInTrashError,
DirectoryInTrashError, DirectoryInTrashError,
@ -157,6 +158,8 @@ class File(Base, ModelMixin):
path = sa.Column(sa.String(128), unique=True, nullable=False) path = sa.Column(sa.String(128), unique=True, nullable=False)
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True) user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
__table_args__ = (sa.Index("ix_file_user_id", "user_id"),)
def get_url(self, expires_in=3600): def get_url(self, expires_in=3600):
return s3.get_url(self.path, expires_in) return s3.get_url(self.path, expires_in)
@ -236,6 +239,7 @@ class AuditLogActionEnum(EnumE):
disable_user = 9 disable_user = 9
enable_user = 10 enable_user = 10
stop_trial = 11 stop_trial = 11
unlink_user = 12
class Phase(EnumE): class Phase(EnumE):
@ -272,6 +276,12 @@ class AliasDeleteReason(EnumE):
CustomDomainDeleted = 5 CustomDomainDeleted = 5
class JobPriority(EnumE):
Low = 1
Default = 50
High = 100
class IntEnumType(sa.types.TypeDecorator): class IntEnumType(sa.types.TypeDecorator):
impl = sa.Integer impl = sa.Integer
@ -318,6 +328,8 @@ class HibpNotifiedAlias(Base, ModelMixin):
notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False) notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
__table_args__ = (sa.Index("ix_hibp_notified_alias_user_id", "user_id"),)
class Fido(Base, ModelMixin): class Fido(Base, ModelMixin):
__tablename__ = "fido" __tablename__ = "fido"
@ -332,11 +344,13 @@ class Fido(Base, ModelMixin):
name = sa.Column(sa.String(128), nullable=False, unique=False) name = sa.Column(sa.String(128), nullable=False, unique=False)
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True) user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
__table_args__ = (sa.Index("ix_fido_user_id", "user_id"),)
class User(Base, ModelMixin, UserMixin, PasswordOracle): class User(Base, ModelMixin, UserMixin, PasswordOracle):
__tablename__ = "users" __tablename__ = "users"
FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0 FLAG_FREE_DISABLE_CREATE_CONTACTS = 1 << 0
FLAG_CREATED_FROM_PARTNER = 1 << 1 FLAG_CREATED_FROM_PARTNER = 1 << 1
FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2 FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3 FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
@ -355,7 +369,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
sa.Boolean, default=True, nullable=False, server_default="1" sa.Boolean, default=True, nullable=False, server_default="1"
) )
activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True) activated = sa.Column(sa.Boolean, default=False, nullable=False)
# an account can be disabled if having harmful behavior # an account can be disabled if having harmful behavior
disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0") disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@ -543,7 +557,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
# bitwise flags. Allow for future expansion # bitwise flags. Allow for future expansion
flags = sa.Column( flags = sa.Column(
sa.BigInteger, sa.BigInteger,
default=FLAG_FREE_DISABLE_CREATE_ALIAS, default=FLAG_FREE_DISABLE_CREATE_CONTACTS,
server_default="0", server_default="0",
nullable=False, nullable=False,
) )
@ -564,6 +578,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
), ),
sa.Index("ix_users_delete_on", delete_on), sa.Index("ix_users_delete_on", delete_on),
sa.Index("ix_users_default_mailbox_id", default_mailbox_id),
sa.Index(
"ix_users_default_alias_custom_domain_id", default_alias_custom_domain_id
),
sa.Index("ix_users_profile_picture_id", profile_picture_id),
sa.Index(
"idx_users_email_trgm",
"email",
postgresql_ops={"email": "gin_trgm_ops"},
postgresql_using="gin",
),
) )
@property @property
@ -616,14 +641,23 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
if "alternative_id" not in kwargs: if "alternative_id" not in kwargs:
user.alternative_id = str(uuid.uuid4()) user.alternative_id = str(uuid.uuid4())
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
trail = ". Created from partner" if from_partner else ""
emit_user_audit_log(
user=user,
action=UserAuditLogAction.CreateUser,
message=f"Created user {email}{trail}",
)
# If the user is created from partner, do not notify # If the user is created from partner, do not notify
# nor give a trial # nor give a trial
if from_partner: if from_partner:
user.flags = User.FLAG_CREATED_FROM_PARTNER user.flags = user.flags | User.FLAG_CREATED_FROM_PARTNER
user.notification = False user.notification = False
user.trial_end = None user.trial_end = None
Job.create( Job.create(
name=config.JOB_SEND_PROTON_WELCOME_1, name=JobType.SEND_PROTON_WELCOME_1.value,
payload={"user_id": user.id}, payload={"user_id": user.id},
run_at=arrow.now(), run_at=arrow.now(),
) )
@ -649,17 +683,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
# Schedule onboarding emails # Schedule onboarding emails
Job.create( Job.create(
name=config.JOB_ONBOARDING_1, name=JobType.ONBOARDING_1.value,
payload={"user_id": user.id}, payload={"user_id": user.id},
run_at=arrow.now().shift(days=1), run_at=arrow.now().shift(days=1),
) )
Job.create( Job.create(
name=config.JOB_ONBOARDING_2, name=JobType.ONBOARDING_2.value,
payload={"user_id": user.id}, payload={"user_id": user.id},
run_at=arrow.now().shift(days=2), run_at=arrow.now().shift(days=2),
) )
Job.create( Job.create(
name=config.JOB_ONBOARDING_4, name=JobType.ONBOARDING_4.value,
payload={"user_id": user.id}, payload={"user_id": user.id},
run_at=arrow.now().shift(days=3), run_at=arrow.now().shift(days=3),
) )
@ -973,7 +1007,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
def has_custom_domain(self): def has_custom_domain(self):
return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0 return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0
def custom_domains(self): def custom_domains(self) -> List["CustomDomain"]:
return CustomDomain.filter_by(user_id=self.id, verified=True).all() return CustomDomain.filter_by(user_id=self.id, verified=True).all()
def available_domains_for_random_alias( def available_domains_for_random_alias(
@ -985,8 +1019,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
- the domain - the domain
""" """
res = [] res = []
for domain in self.available_sl_domains(alias_options=alias_options): for domain in self.get_sl_domains(alias_options=alias_options):
res.append((True, domain)) res.append((True, domain.domain))
for custom_domain in self.verified_custom_domains(): for custom_domain in self.verified_custom_domains():
res.append((False, custom_domain.domain)) res.append((False, custom_domain.domain))
@ -1128,7 +1162,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
- Verified custom domains - Verified custom domains
""" """
domains = self.available_sl_domains(alias_options=alias_options) domains = [
sl_domain.domain
for sl_domain in self.get_sl_domains(alias_options=alias_options)
]
for custom_domain in self.verified_custom_domains(): for custom_domain in self.verified_custom_domains():
domains.append(custom_domain.domain) domains.append(custom_domain.domain)
@ -1165,7 +1202,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
def can_create_contacts(self) -> bool: def can_create_contacts(self) -> bool:
if self.is_premium(): if self.is_premium():
return True return True
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0: if self.flags & User.FLAG_FREE_DISABLE_CREATE_CONTACTS == 0:
return True return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
@ -1208,6 +1245,8 @@ class ActivationCode(Base, ModelMixin):
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h) expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
__table_args__ = (sa.Index("ix_activation_code_user_id", "user_id"),)
def is_expired(self): def is_expired(self):
return self.expired < arrow.now() return self.expired < arrow.now()
@ -1224,6 +1263,8 @@ class ResetPasswordCode(Base, ModelMixin):
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h) expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
__table_args__ = (sa.Index("ix_reset_password_code_user_id", "user_id"),)
def is_expired(self): def is_expired(self):
return self.expired < arrow.now() return self.expired < arrow.now()
@ -1266,6 +1307,8 @@ class MfaBrowser(Base, ModelMixin):
user = orm.relationship(User) user = orm.relationship(User)
__table_args__ = (sa.Index("ix_mfa_browser_user_id", "user_id"),)
@classmethod @classmethod
def create_new(cls, user, token_length=64) -> "MfaBrowser": def create_new(cls, user, token_length=64) -> "MfaBrowser":
found = False found = False
@ -1324,6 +1367,12 @@ class Client(Base, ModelMixin):
user = orm.relationship(User) user = orm.relationship(User)
referral = orm.relationship("Referral") referral = orm.relationship("Referral")
__table_args__ = (
sa.Index("ix_client_user_id", "user_id"),
sa.Index("ix_client_icon_id", "icon_id"),
sa.Index("ix_client_referral_id", "referral_id"),
)
def nb_user(self): def nb_user(self):
return ClientUser.filter_by(client_id=self.id).count() return ClientUser.filter_by(client_id=self.id).count()
@ -1372,6 +1421,8 @@ class RedirectUri(Base, ModelMixin):
client = orm.relationship(Client, backref="redirect_uris") client = orm.relationship(Client, backref="redirect_uris")
__table_args__ = (sa.Index("ix_redirect_uri_client_id", "client_id"),)
class AuthorizationCode(Base, ModelMixin): class AuthorizationCode(Base, ModelMixin):
__tablename__ = "authorization_code" __tablename__ = "authorization_code"
@ -1393,6 +1444,11 @@ class AuthorizationCode(Base, ModelMixin):
expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m) expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m)
__table_args__ = (
sa.Index("ix_authorization_code_client_id", "client_id"),
sa.Index("ix_authorization_code_user_id", "user_id"),
)
def is_expired(self): def is_expired(self):
return self.expired < arrow.now() return self.expired < arrow.now()
@ -1415,6 +1471,11 @@ class OauthToken(Base, ModelMixin):
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h) expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
__table_args__ = (
sa.Index("ix_oauth_token_user_id", "user_id"),
sa.Index("ix_oauth_token_client_id", "client_id"),
)
def is_expired(self): def is_expired(self):
return self.expired < arrow.now() return self.expired < arrow.now()
@ -1568,6 +1629,7 @@ class Alias(Base, ModelMixin):
postgresql_ops={"note": "gin_trgm_ops"}, postgresql_ops={"note": "gin_trgm_ops"},
postgresql_using="gin", postgresql_using="gin",
), ),
Index("ix_alias_original_owner_id", "original_owner_id"),
) )
user = orm.relationship(User, foreign_keys=[user_id]) user = orm.relationship(User, foreign_keys=[user_id])
@ -1610,7 +1672,7 @@ class Alias(Base, ModelMixin):
return False return False
@staticmethod @staticmethod
def get_custom_domain(alias_address) -> Optional["CustomDomain"]: def get_custom_domain(alias_address: str) -> Optional["CustomDomain"]:
alias_domain = validate_email( alias_domain = validate_email(
alias_address, check_deliverability=False, allow_smtputf8=False alias_address, check_deliverability=False, allow_smtputf8=False
).domain ).domain
@ -1653,22 +1715,15 @@ class Alias(Base, ModelMixin):
custom_domain = Alias.get_custom_domain(email) custom_domain = Alias.get_custom_domain(email)
if custom_domain: if custom_domain:
new_alias.custom_domain_id = custom_domain.id new_alias.custom_domain_id = custom_domain.id
else:
custom_domain = CustomDomain.get(kw["custom_domain_id"])
# If it comes from a custom domain created from partner. Mark it as created from partner
if custom_domain is not None and custom_domain.partner_id is not None:
new_alias.flags = (new_alias.flags or 0) | Alias.FLAG_PARTNER_CREATED
Session.add(new_alias) Session.add(new_alias)
DailyMetric.get_or_create_today_metric().nb_alias += 1 DailyMetric.get_or_create_today_metric().nb_alias += 1
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import AliasCreated, EventContent
event = AliasCreated(
alias_id=new_alias.id,
alias_email=new_alias.email,
alias_note=new_alias.note,
enabled=True,
)
EventDispatcher.send_event(user, EventContent(alias_created=event))
if ( if (
new_alias.flags & cls.FLAG_PARTNER_CREATED > 0 new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0 and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
@ -1681,6 +1736,23 @@ class Alias(Base, ModelMixin):
if flush: if flush:
Session.flush() Session.flush()
# Internal import to avoid global import cycles
from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import AliasCreated, EventContent
event = AliasCreated(
id=new_alias.id,
email=new_alias.email,
note=new_alias.note,
enabled=True,
created_at=int(new_alias.created_at.timestamp),
)
EventDispatcher.send_event(user, EventContent(alias_created=event))
emit_alias_audit_log(
new_alias, AliasAuditLogAction.CreateAlias, "New alias created"
)
return new_alias return new_alias
@classmethod @classmethod
@ -1859,17 +1931,22 @@ class Contact(Base, ModelMixin):
MAX_NAME_LENGTH = 512 MAX_NAME_LENGTH = 512
FLAG_PARTNER_CREATED = 1 << 0
__tablename__ = "contact" __tablename__ = "contact"
__table_args__ = ( __table_args__ = (
sa.UniqueConstraint("alias_id", "website_email", name="uq_contact"), sa.UniqueConstraint("alias_id", "website_email", name="uq_contact"),
sa.Index("ix_contact_user_id_id", "user_id", "id"),
) )
user_id = sa.Column( user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(User.id, ondelete="cascade"),
nullable=False,
) )
alias_id = sa.Column( alias_id = sa.Column(
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(Alias.id, ondelete="cascade"),
nullable=False,
) )
name = sa.Column( name = sa.Column(
@ -1917,6 +1994,9 @@ class Contact(Base, ModelMixin):
# whether contact is created automatically during the forward phase # whether contact is created automatically during the forward phase
automatic_created = sa.Column(sa.Boolean, nullable=True, default=False) automatic_created = sa.Column(sa.Boolean, nullable=True, default=False)
# contact flags
flags = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
@property @property
def email(self): def email(self):
return self.website_email return self.website_email
@ -2046,11 +2126,15 @@ class Contact(Base, ModelMixin):
class EmailLog(Base, ModelMixin): class EmailLog(Base, ModelMixin):
__tablename__ = "email_log" __tablename__ = "email_log"
__table_args__ = (Index("ix_email_log_created_at", "created_at"),) __table_args__ = (
Index("ix_email_log_created_at", "created_at"),
user_id = sa.Column( Index("ix_email_log_mailbox_id", "mailbox_id"),
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True Index("ix_email_log_bounced_mailbox_id", "bounced_mailbox_id"),
Index("ix_email_log_refused_email_id", "refused_email_id"),
Index("ix_email_log_user_id_email_log_id", "user_id", "id"),
) )
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
contact_id = sa.Column( contact_id = sa.Column(
sa.ForeignKey(Contact.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(Contact.id, ondelete="cascade"), nullable=False, index=True
) )
@ -2322,10 +2406,12 @@ class AliasUsedOn(Base, ModelMixin):
__table_args__ = ( __table_args__ = (
sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"), sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
sa.Index("ix_alias_used_on_user_id", "user_id"),
) )
alias_id = sa.Column( alias_id = sa.Column(
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(Alias.id, ondelete="cascade"),
nullable=False,
) )
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False) user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
@ -2348,6 +2434,8 @@ class ApiKey(Base, ModelMixin):
user = orm.relationship(User) user = orm.relationship(User)
__table_args__ = (sa.Index("ix_api_key_user_id", "user_id"),)
@classmethod @classmethod
def create(cls, user_id, name=None, **kwargs): def create(cls, user_id, name=None, **kwargs):
code = random_string(60) code = random_string(60)
@ -2415,6 +2503,18 @@ class CustomDomain(Base, ModelMixin):
sa.Boolean, nullable=False, default=False, server_default="0" sa.Boolean, nullable=False, default=False, server_default="0"
) )
partner_id = sa.Column(
sa.Integer,
sa.ForeignKey("partner.id"),
nullable=True,
default=None,
server_default=None,
)
pending_deletion = sa.Column(
sa.Boolean, nullable=False, default=False, server_default="0"
)
__table_args__ = ( __table_args__ = (
Index( Index(
"ix_unique_domain", # Index name "ix_unique_domain", # Index name
@ -2422,6 +2522,8 @@ class CustomDomain(Base, ModelMixin):
unique=True, unique=True,
postgresql_where=Column("ownership_verified"), postgresql_where=Column("ownership_verified"),
), # The condition ), # The condition
Index("ix_custom_domain_user_id", "user_id"),
Index("ix_custom_domain_pending_deletion", "pending_deletion"),
) )
user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains") user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
@ -2439,9 +2541,6 @@ class CustomDomain(Base, ModelMixin):
def get_trash_url(self): def get_trash_url(self):
return config.URL + f"/dashboard/domains/{self.id}/trash" return config.URL + f"/dashboard/domains/{self.id}/trash"
def get_ownership_dns_txt_value(self):
return f"sl-verification={self.ownership_txt_token}"
@classmethod @classmethod
def create(cls, **kwargs): def create(cls, **kwargs):
domain = kwargs.get("domain") domain = kwargs.get("domain")
@ -2483,7 +2582,7 @@ class CustomDomain(Base, ModelMixin):
return sorted(self._auto_create_rules, key=lambda rule: rule.order) return sorted(self._auto_create_rules, key=lambda rule: rule.order)
def __repr__(self): def __repr__(self):
return f"<Custom Domain {self.domain}>" return f"<Custom Domain {self.id} {self.domain}>"
class AutoCreateRule(Base, ModelMixin): class AutoCreateRule(Base, ModelMixin):
@ -2538,6 +2637,7 @@ class DomainDeletedAlias(Base, ModelMixin):
__table_args__ = ( __table_args__ = (
sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"), sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"),
sa.Index("ix_domain_deleted_alias_user_id", "user_id"),
) )
email = sa.Column(sa.String(256), nullable=False) email = sa.Column(sa.String(256), nullable=False)
@ -2598,6 +2698,8 @@ class Coupon(Base, ModelMixin):
# a coupon can have an expiration # a coupon can have an expiration
expires_date = sa.Column(ArrowType, nullable=True) expires_date = sa.Column(ArrowType, nullable=True)
__table_args__ = (sa.Index("ix_coupon_used_by_user_id", "used_by_user_id"),)
class Directory(Base, ModelMixin): class Directory(Base, ModelMixin):
__tablename__ = "directory" __tablename__ = "directory"
@ -2612,6 +2714,8 @@ class Directory(Base, ModelMixin):
"Mailbox", secondary="directory_mailbox", lazy="joined" "Mailbox", secondary="directory_mailbox", lazy="joined"
) )
__table_args__ = (sa.Index("ix_directory_user_id", "user_id"),)
@property @property
def mailboxes(self): def mailboxes(self):
if self._mailboxes: if self._mailboxes:
@ -2672,12 +2776,19 @@ class Job(Base, ModelMixin):
nullable=False, nullable=False,
server_default=str(JobState.ready.value), server_default=str(JobState.ready.value),
default=JobState.ready.value, default=JobState.ready.value,
index=True,
) )
attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0) attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0)
taken_at = sa.Column(ArrowType, nullable=True) taken_at = sa.Column(ArrowType, nullable=True)
priority = sa.Column(
IntEnumType(JobPriority),
default=JobPriority.Default,
server_default=str(JobPriority.Default.value),
nullable=False,
)
__table_args__ = (Index("ix_state_run_at_taken_at", state, run_at, taken_at),) __table_args__ = (
Index("ix_state_run_at_taken_at_priority", state, run_at, taken_at, priority),
)
def __repr__(self): def __repr__(self):
return f"<Job {self.id} {self.name} {self.payload}>" return f"<Job {self.id} {self.name} {self.payload}>"
@ -2685,9 +2796,7 @@ class Job(Base, ModelMixin):
class Mailbox(Base, ModelMixin): class Mailbox(Base, ModelMixin):
__tablename__ = "mailbox" __tablename__ = "mailbox"
user_id = sa.Column( user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
)
email = sa.Column(sa.String(256), nullable=False, index=True) email = sa.Column(sa.String(256), nullable=False, index=True)
verified = sa.Column(sa.Boolean, default=False, nullable=False) verified = sa.Column(sa.Boolean, default=False, nullable=False)
force_spf = sa.Column(sa.Boolean, default=True, server_default="1", nullable=False) force_spf = sa.Column(sa.Boolean, default=True, server_default="1", nullable=False)
@ -2713,7 +2822,17 @@ class Mailbox(Base, ModelMixin):
generic_subject = sa.Column(sa.String(78), nullable=True) generic_subject = sa.Column(sa.String(78), nullable=True)
__table_args__ = (sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),) __table_args__ = (
sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),
sa.Index("ix_mailbox_pgp_finger_print", "pgp_finger_print"),
# index on email column using pg_trgm
Index(
"ix_mailbox_email_trgm_idx",
"email",
postgresql_ops={"email": "gin_trgm_ops"},
postgresql_using="gin",
),
)
user = orm.relationship(User, foreign_keys=[user_id]) user = orm.relationship(User, foreign_keys=[user_id])
@ -2735,24 +2854,20 @@ class Mailbox(Base, ModelMixin):
return len(alias_ids) return len(alias_ids)
def is_proton(self) -> bool: def is_proton(self) -> bool:
if ( for proton_email_domain in config.PROTON_EMAIL_DOMAINS:
self.email.endswith("@proton.me") if self.email.endswith(f"@{proton_email_domain}"):
or self.email.endswith("@protonmail.com") return True
or self.email.endswith("@protonmail.ch")
or self.email.endswith("@proton.ch")
or self.email.endswith("@pm.me")
):
return True
from app.email_utils import get_email_local_part from app.email_utils import get_email_local_part
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email)) mx_domains = get_mx_domains(get_email_local_part(self.email))
proton_mx_domains = config.PROTON_MX_SERVERS
# Proton is the first domain # Proton is the first domain
if mx_domains and mx_domains[0][1] in ( for prio in mx_domains:
"mail.protonmail.ch.", for mx_domain in mx_domains[prio]:
"mailsec.protonmail.ch.", if mx_domain in proton_mx_domains:
): return True
return True
return False return False
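For clarity, the rewritten is_proton loop implies get_mx_domains() now returns a mapping of MX priority to hostnames; a sketch of the equivalent check with hypothetical values (the PROTON_MX_SERVERS contents below are assumptions):

# hypothetical return value of get_mx_domains(...)
mx_domains = {
    10: ["mail.protonmail.ch."],
    20: ["mailsec.protonmail.ch."],
}
proton_mx_domains = ["mail.protonmail.ch.", "mailsec.protonmail.ch."]  # assumed config.PROTON_MX_SERVERS

is_proton_mx = any(
    mx_domain in proton_mx_domains
    for prio in mx_domains
    for mx_domain in mx_domains[prio]
)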
@ -2801,6 +2916,16 @@ class Mailbox(Base, ModelMixin):
return f"<Mailbox {self.id} {self.email}>" return f"<Mailbox {self.id} {self.email}>"
class MailboxActivation(Base, ModelMixin):
__tablename__ = "mailbox_activation"
mailbox_id = sa.Column(
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
)
code = sa.Column(sa.String(32), nullable=False, index=True)
tries = sa.Column(sa.Integer, default=0, nullable=False)
class AccountActivation(Base, ModelMixin): class AccountActivation(Base, ModelMixin):
"""contains code to activate the user account when they sign up on mobile""" """contains code to activate the user account when they sign up on mobile"""
@ -2840,6 +2965,8 @@ class RefusedEmail(Base, ModelMixin):
# toggle this when email content (stored at full_report_path & path are deleted) # toggle this when email content (stored at full_report_path & path are deleted)
deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0") deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
__table_args__ = (sa.Index("ix_refused_email_user_id", "user_id"),)
def get_url(self, expires_in=3600): def get_url(self, expires_in=3600):
if self.path: if self.path:
return s3.get_url(self.path, expires_in) return s3.get_url(self.path, expires_in)
@ -2862,6 +2989,8 @@ class Referral(Base, ModelMixin):
user = orm.relationship(User, foreign_keys=[user_id], backref="referrals") user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")
__table_args__ = (sa.Index("ix_referral_user_id", "user_id"),)
@property @property
def nb_user(self) -> int: def nb_user(self) -> int:
return User.filter_by(referral_id=self.id, activated=True).count() return User.filter_by(referral_id=self.id, activated=True).count()
@ -2901,6 +3030,12 @@ class SentAlert(Base, ModelMixin):
to_email = sa.Column(sa.String(256), nullable=False) to_email = sa.Column(sa.String(256), nullable=False)
alert_type = sa.Column(sa.String(256), nullable=False) alert_type = sa.Column(sa.String(256), nullable=False)
__table_args__ = (
sa.Index("ix_sent_alert_user_id", "user_id"),
sa.Index("ix_sent_alert_to_email", "to_email"),
sa.Index("ix_sent_alert_alert_type", "alert_type"),
)
class AliasMailbox(Base, ModelMixin): class AliasMailbox(Base, ModelMixin):
__tablename__ = "alias_mailbox" __tablename__ = "alias_mailbox"
@ -2909,7 +3044,8 @@ class AliasMailbox(Base, ModelMixin):
) )
alias_id = sa.Column( alias_id = sa.Column(
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(Alias.id, ondelete="cascade"),
nullable=False,
) )
mailbox_id = sa.Column( mailbox_id = sa.Column(
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
@ -2924,7 +3060,8 @@ class AliasHibp(Base, ModelMixin):
__table_args__ = (sa.UniqueConstraint("alias_id", "hibp_id", name="uq_alias_hibp"),) __table_args__ = (sa.UniqueConstraint("alias_id", "hibp_id", name="uq_alias_hibp"),)
    alias_id = sa.Column(
-        sa.Integer(), sa.ForeignKey("alias.id", ondelete="cascade"), index=True
+        sa.Integer(),
+        sa.ForeignKey("alias.id", ondelete="cascade"),
    )
hibp_id = sa.Column( hibp_id = sa.Column(
sa.Integer(), sa.ForeignKey("hibp.id", ondelete="cascade"), index=True sa.Integer(), sa.ForeignKey("hibp.id", ondelete="cascade"), index=True
@ -3114,7 +3251,7 @@ class SLDomain(Base, ModelMixin):
) )
    def __repr__(self):
-        return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"
+        return f"<SLDomain {self.id} {self.domain} {'Premium' if self.premium_only else 'Free'}>"
class Monitoring(Base, ModelMixin): class Monitoring(Base, ModelMixin):
@ -3146,6 +3283,11 @@ class BatchImport(Base, ModelMixin):
file = orm.relationship(File) file = orm.relationship(File)
user = orm.relationship(User) user = orm.relationship(User)
__table_args__ = (
sa.Index("ix_batch_import_file_id", "file_id"),
sa.Index("ix_batch_import_user_id", "user_id"),
)
def nb_alias(self): def nb_alias(self):
return Alias.filter_by(batch_import_id=self.id).count() return Alias.filter_by(batch_import_id=self.id).count()
@ -3166,6 +3308,7 @@ class AuthorizedAddress(Base, ModelMixin):
__table_args__ = ( __table_args__ = (
sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"), sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
sa.Index("ix_authorized_address_user_id", "user_id"),
) )
mailbox = orm.relationship(Mailbox, backref="authorized_addresses") mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
@ -3307,6 +3450,8 @@ class Payout(Base, ModelMixin):
user = orm.relationship(User) user = orm.relationship(User)
__table_args__ = (sa.Index("ix_payout_user_id", "user_id"),)
class IgnoredEmail(Base, ModelMixin): class IgnoredEmail(Base, ModelMixin):
"""If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status.""" """If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
@ -3408,6 +3553,8 @@ class PhoneReservation(Base, ModelMixin):
start = sa.Column(ArrowType, nullable=False) start = sa.Column(ArrowType, nullable=False)
end = sa.Column(ArrowType, nullable=False) end = sa.Column(ArrowType, nullable=False)
__table_args__ = (sa.Index("ix_phone_reservation_user_id", "user_id"),)
class PhoneMessage(Base, ModelMixin): class PhoneMessage(Base, ModelMixin):
__tablename__ = "phone_message" __tablename__ = "phone_message"
@ -3484,6 +3631,7 @@ class AdminAuditLog(Base):
action=AuditLogActionEnum.stop_trial.value, action=AuditLogActionEnum.stop_trial.value,
model="User", model="User",
model_id=user_id, model_id=user_id,
data={},
) )
@classmethod @classmethod
@ -3581,6 +3729,11 @@ class ProviderComplaint(Base, ModelMixin):
user = orm.relationship(User, foreign_keys=[user_id]) user = orm.relationship(User, foreign_keys=[user_id])
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id]) refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
__table_args__ = (
sa.Index("ix_provider_complaint_user_id", "user_id"),
sa.Index("ix_provider_complaint_refused_email_id", "refused_email_id"),
)
class PartnerApiToken(Base, ModelMixin): class PartnerApiToken(Base, ModelMixin):
__tablename__ = "partner_api_token" __tablename__ = "partner_api_token"
@ -3624,7 +3777,8 @@ class PartnerUser(Base, ModelMixin):
index=True, index=True,
) )
    partner_id = sa.Column(
-        sa.ForeignKey("partner.id", ondelete="cascade"), nullable=False, index=True
+        sa.ForeignKey("partner.id", ondelete="cascade"),
+        nullable=False,
    )
external_user_id = sa.Column(sa.String(128), unique=False, nullable=False) external_user_id = sa.Column(sa.String(128), unique=False, nullable=False)
partner_email = sa.Column(sa.String(255), unique=False, nullable=True) partner_email = sa.Column(sa.String(255), unique=False, nullable=True)
@ -3651,7 +3805,8 @@ class PartnerSubscription(Base, ModelMixin):
) )
    # when the partner subscription ends
-    end_at = sa.Column(ArrowType, nullable=False, index=True)
+    end_at = sa.Column(ArrowType, nullable=True, index=True)
lifetime = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
partner_user = orm.relationship(PartnerUser) partner_user = orm.relationship(PartnerUser)
@ -3673,7 +3828,9 @@ class PartnerSubscription(Base, ModelMixin):
return None return None
    def is_active(self):
-        return self.end_at > arrow.now().shift(days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS)
+        return self.lifetime or self.end_at > arrow.now().shift(
+            days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS
+        )
# endregion # endregion
@ -3704,6 +3861,8 @@ class NewsletterUser(Base, ModelMixin):
user = orm.relationship(User) user = orm.relationship(User)
newsletter = orm.relationship(Newsletter) newsletter = orm.relationship(Newsletter)
__table_args__ = (sa.Index("ix_newsletter_user_user_id", "user_id"),)
class ApiToCookieToken(Base, ModelMixin): class ApiToCookieToken(Base, ModelMixin):
__tablename__ = "api_cookie_token" __tablename__ = "api_cookie_token"
@ -3714,6 +3873,11 @@ class ApiToCookieToken(Base, ModelMixin):
user = orm.relationship(User) user = orm.relationship(User)
api_key = orm.relationship(ApiKey) api_key = orm.relationship(ApiKey)
__table_args__ = (
sa.Index("ix_api_to_cookie_token_api_key_id", "api_key_id"),
sa.Index("ix_api_to_cookie_token_user_id", "user_id"),
)
@classmethod @classmethod
def create(cls, **kwargs): def create(cls, **kwargs):
code = secrets.token_urlsafe(32) code = secrets.token_urlsafe(32)
@ -3729,28 +3893,31 @@ class SyncEvent(Base, ModelMixin):
taken_time = sa.Column( taken_time = sa.Column(
ArrowType, default=None, nullable=True, server_default=None, index=True ArrowType, default=None, nullable=True, server_default=None, index=True
) )
retry_count = sa.Column(sa.Integer, default=0, nullable=False, server_default="0")
__table_args__ = ( __table_args__ = (
sa.Index("ix_sync_event_created_at", "created_at"), sa.Index("ix_sync_event_created_at", "created_at"),
sa.Index("ix_sync_event_taken_time", "taken_time"), sa.Index("ix_sync_event_taken_time", "taken_time"),
) )
-    def mark_as_taken(self) -> bool:
-        sql = """
-            UPDATE sync_event
-            SET taken_time = :taken_time
-            WHERE id = :sync_event_id
-            AND taken_time IS NULL
-        """
-        args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
-        res = Session.execute(sql, args)
-        Session.commit()
-        return res.rowcount > 0
+    def mark_as_taken(self, allow_taken_older_than: Optional[Arrow] = None) -> bool:
+        try:
+            taken_condition = ["taken_time IS NULL"]
+            args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
+            if allow_taken_older_than:
+                taken_condition.append("taken_time < :taken_older_than")
+                args["taken_older_than"] = allow_taken_older_than.datetime
+            sql_taken_condition = "({})".format(" OR ".join(taken_condition))
+            sql = f"UPDATE sync_event SET taken_time = :taken_time WHERE id = :sync_event_id AND {sql_taken_condition}"
+            res = Session.execute(sql, args)
+            Session.commit()
+        except ObjectDeletedError:
+            return False
+        return res.rowcount > 0

    @classmethod
-    def get_dead_letter(cls, older_than: Arrow) -> [SyncEvent]:
+    def get_dead_letter(cls, older_than: Arrow, max_retries: int) -> [SyncEvent]:
return ( return (
SyncEvent.filter( SyncEvent.filter(
( (
@ -3763,8 +3930,45 @@ class SyncEvent(Base, ModelMixin):
& (SyncEvent.created_at < older_than) & (SyncEvent.created_at < older_than)
) )
) )
& (SyncEvent.retry_count < max_retries)
) )
.order_by(SyncEvent.id) .order_by(SyncEvent.id)
.limit(100) .limit(100)
.all() .all()
) )
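A consumer-side sketch of how the reworked locking can be used to re-take events whose lock has gone stale; the 10-minute window and the retry limit of 10 are illustrative values, not constants from this diff:

import arrow

from app.db import Session
from app.models import SyncEvent

stale_cutoff = arrow.now().shift(minutes=-10)  # assumed staleness window
for event in SyncEvent.get_dead_letter(older_than=stale_cutoff, max_retries=10):
    # Only process the event if we still manage to grab (or re-grab) the lock.
    if event.mark_as_taken(allow_taken_older_than=stale_cutoff):
        event.retry_count = event.retry_count + 1
        Session.commit()
        # ... hand event.content to the actual processor here ...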
class AliasAuditLog(Base, ModelMixin):
"""This model holds an audit log for all the actions performed to an alias"""
__tablename__ = "alias_audit_log"
user_id = sa.Column(sa.Integer, nullable=False)
alias_id = sa.Column(sa.Integer, nullable=False)
alias_email = sa.Column(sa.String(255), nullable=False)
action = sa.Column(sa.String(255), nullable=False)
message = sa.Column(sa.Text, default=None, nullable=True)
__table_args__ = (
sa.Index("ix_alias_audit_log_user_id", "user_id"),
sa.Index("ix_alias_audit_log_alias_id", "alias_id"),
sa.Index("ix_alias_audit_log_alias_email", "alias_email"),
sa.Index("ix_alias_audit_log_created_at", "created_at"),
)
class UserAuditLog(Base, ModelMixin):
"""This model holds an audit log for all the actions performed by a user"""
__tablename__ = "user_audit_log"
user_id = sa.Column(sa.Integer, nullable=False)
user_email = sa.Column(sa.String(255), nullable=False)
action = sa.Column(sa.String(255), nullable=False)
message = sa.Column(sa.Text, default=None, nullable=True)
__table_args__ = (
sa.Index("ix_user_audit_log_user_id", "user_id"),
sa.Index("ix_user_audit_log_user_email", "user_email"),
sa.Index("ix_user_audit_log_created_at", "created_at"),
)
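Both tables are plain rows plus indexes; a minimal sketch of recording an alias entry with the model above (the helper is illustrative, only the model and indexes come from the diff):

from typing import Optional

from app.models import Alias, AliasAuditLog


def record_alias_action(alias: Alias, action: str, message: Optional[str] = None):
    # Illustrative helper: writes one row using the columns defined above.
    AliasAuditLog.create(
        user_id=alias.user_id,
        alias_id=alias.id,
        alias_email=alias.email,
        action=action,
        message=message,
        commit=True,
    )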


@ -1,4 +1,4 @@
-from app.build_info import SHA1
+from app.build_info import SHA1, VERSION
from app.monitor.base import monitor_bp from app.monitor.base import monitor_bp
@ -7,6 +7,11 @@ def git_sha1():
return SHA1 return SHA1
@monitor_bp.route("/version")
def version():
return VERSION
@monitor_bp.route("/live") @monitor_bp.route("/live")
def live(): def live():
return "live" return "live"

app/app/monitor_utils.py (new file)

@ -0,0 +1,8 @@
from app.build_info import VERSION
import newrelic.agent
def send_version_event(service: str):
newrelic.agent.record_custom_event(
"ServiceVersion", {"service": service, "version": VERSION}
)
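A one-line usage sketch; the email handler further down in this diff calls it the same way, and "cron" here is only an example service name:

from app.monitor_utils import send_version_event

send_version_event("cron")  # emits a ServiceVersion custom event to New Relic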


@ -0,0 +1,55 @@
from typing import Optional
import arrow
from arrow import Arrow
from app.constants import JobType
from app.models import PartnerUser, PartnerSubscription, User, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
def create_partner_user(
user: User, partner_id: int, partner_email: str, external_user_id: str
) -> PartnerUser:
instance = PartnerUser.create(
user_id=user.id,
partner_id=partner_id,
partner_email=partner_email,
external_user_id=external_user_id,
)
Job.create(
name=JobType.SEND_ALIAS_CREATION_EVENTS.value,
payload={"user_id": user.id},
run_at=arrow.now(),
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.LinkAccount,
message=f"Linked account to partner_id={partner_id} | partner_email={partner_email} | external_user_id={external_user_id}",
)
return instance
def create_partner_subscription(
partner_user: PartnerUser,
expiration: Optional[Arrow] = None,
lifetime: bool = False,
msg: Optional[str] = None,
) -> PartnerSubscription:
instance = PartnerSubscription.create(
partner_user_id=partner_user.id,
end_at=expiration,
lifetime=lifetime,
)
message = "User upgraded through partner subscription"
if msg:
message += f" | {msg}"
emit_user_audit_log(
user=partner_user.user,
action=UserAuditLogAction.Upgrade,
message=message,
)
return instance
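A sketch of how the two helpers compose when linking a Proton account and granting a subscription; the identifiers are placeholders and the import path of this new module is an assumption (its file name is not visible in this extract):

import arrow

from app.models import User
from app.proton.proton_partner import get_proton_partner
# NOTE: assumed module path for the helpers defined above.
from app.partner_user_utils import create_partner_user, create_partner_subscription

user = User.get(42)  # placeholder id
proton = get_proton_partner()
partner_user = create_partner_user(
    user=user,
    partner_id=proton.id,
    partner_email="someone@proton.me",       # placeholder
    external_user_id="proton-external-id",   # placeholder
)
# Either a dated subscription...
create_partner_subscription(partner_user, expiration=arrow.now().shift(years=1))
# ...or a lifetime one (end_at stays NULL thanks to the model change above):
# create_partner_subscription(partner_user, lifetime=True, msg="lifetime deal")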



@ -0,0 +1,121 @@
from typing import Optional
import arrow
from coinbase_commerce.error import WebhookInvalidPayload, SignatureVerificationError
from coinbase_commerce.webhook import Webhook
from flask import Flask, request
from app.config import COINBASE_WEBHOOK_SECRET
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import CoinbaseSubscription, User
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
def setup_coinbase_commerce(app: Flask):
@app.route("/coinbase", methods=["POST"])
def coinbase_webhook():
# event payload
request_data = request.data.decode("utf-8")
# webhook signature
request_sig = request.headers.get("X-CC-Webhook-Signature", None)
try:
# signature verification and event object construction
event = Webhook.construct_event(
request_data, request_sig, COINBASE_WEBHOOK_SECRET
)
except (WebhookInvalidPayload, SignatureVerificationError) as e:
LOG.e("Invalid Coinbase webhook")
return str(e), 400
LOG.d("Coinbase event %s", event)
if event["type"] == "charge:confirmed":
if handle_coinbase_event(event):
return "success", 200
else:
return "error", 400
return "success", 200
def handle_coinbase_event(event) -> bool:
server_user_id = event["data"]["metadata"]["user_id"]
try:
user_id = int(server_user_id)
except ValueError:
user_id = int(float(server_user_id))
code = event["data"]["code"]
user: Optional[User] = User.get(user_id)
if not user:
LOG.e("User not found %s", user_id)
return False
coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
user_id=user_id
)
if not coinbase_subscription:
LOG.d("Create a coinbase subscription for %s", user)
coinbase_subscription = CoinbaseSubscription.create(
user_id=user_id, end_at=arrow.now().shift(years=1), code=code, commit=True
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.Upgrade,
message="Upgraded though Coinbase",
commit=True,
)
send_email(
user.email,
"Your SimpleLogin account has been upgraded",
render(
"transactional/coinbase/new-subscription.txt",
user=user,
coinbase_subscription=coinbase_subscription,
),
render(
"transactional/coinbase/new-subscription.html",
user=user,
coinbase_subscription=coinbase_subscription,
),
)
else:
if coinbase_subscription.code != code:
LOG.d("Update code from %s to %s", coinbase_subscription.code, code)
coinbase_subscription.code = code
if coinbase_subscription.is_active():
coinbase_subscription.end_at = coinbase_subscription.end_at.shift(years=1)
else: # already expired subscription
coinbase_subscription.end_at = arrow.now().shift(years=1)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.SubscriptionExtended,
message="Extended coinbase subscription",
)
Session.commit()
send_email(
user.email,
"Your SimpleLogin account has been extended",
render(
"transactional/coinbase/extend-subscription.txt",
user=user,
coinbase_subscription=coinbase_subscription,
),
render(
"transactional/coinbase/extend-subscription.html",
user=user,
coinbase_subscription=coinbase_subscription,
),
)
execute_subscription_webhook(user)
return True

app/app/payments/paddle.py (new file)

@ -0,0 +1,286 @@
import arrow
import json
from dateutil.relativedelta import relativedelta
from flask import Flask, request
from app import paddle_utils, paddle_callback
from app.config import (
PADDLE_MONTHLY_PRODUCT_ID,
PADDLE_MONTHLY_PRODUCT_IDS,
PADDLE_YEARLY_PRODUCT_IDS,
PADDLE_COUPON_ID,
)
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import Subscription, PlanEnum, User, Coupon
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string
def setup_paddle_callback(app: Flask):
@app.route("/paddle", methods=["GET", "POST"])
def paddle():
LOG.d(f"paddle callback {request.form.get('alert_name')} {request.form}")
# make sure the request comes from Paddle
if not paddle_utils.verify_incoming_request(dict(request.form)):
LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
return "KO", 400
if (
request.form.get("alert_name") == "subscription_created"
): # new user subscribes
# the passthrough is json encoded, e.g.
# request.form.get("passthrough") = '{"user_id": 88 }'
passthrough = json.loads(request.form.get("passthrough"))
user_id = passthrough.get("user_id")
user = User.get(user_id)
subscription_plan_id = int(request.form.get("subscription_plan_id"))
if subscription_plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
plan = PlanEnum.monthly
elif subscription_plan_id in PADDLE_YEARLY_PRODUCT_IDS:
plan = PlanEnum.yearly
else:
LOG.e(
"Unknown subscription_plan_id %s %s",
subscription_plan_id,
request.form,
)
return "No such subscription", 400
sub = Subscription.get_by(user_id=user.id)
if not sub:
LOG.d(f"create a new Subscription for user {user}")
Subscription.create(
user_id=user.id,
cancel_url=request.form.get("cancel_url"),
update_url=request.form.get("update_url"),
subscription_id=request.form.get("subscription_id"),
event_time=arrow.now(),
next_bill_date=arrow.get(
request.form.get("next_bill_date"), "YYYY-MM-DD"
).date(),
plan=plan,
)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.Upgrade,
message="Upgraded through Paddle",
)
else:
LOG.d(f"Update an existing Subscription for user {user}")
sub.cancel_url = request.form.get("cancel_url")
sub.update_url = request.form.get("update_url")
sub.subscription_id = request.form.get("subscription_id")
sub.event_time = arrow.now()
sub.next_bill_date = arrow.get(
request.form.get("next_bill_date"), "YYYY-MM-DD"
).date()
sub.plan = plan
# make sure to set the new plan as not-cancelled
# in case user cancels a plan and subscribes a new plan
sub.cancelled = False
emit_user_audit_log(
user=user,
action=UserAuditLogAction.SubscriptionExtended,
message="Extended Paddle subscription",
)
execute_subscription_webhook(user)
LOG.d("User %s upgrades!", user)
Session.commit()
elif request.form.get("alert_name") == "subscription_payment_succeeded":
subscription_id = request.form.get("subscription_id")
LOG.d("Update subscription %s", subscription_id)
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
# when user subscribes, the "subscription_payment_succeeded" can arrive BEFORE "subscription_created"
# at that time, subscription object does not exist yet
if sub:
sub.event_time = arrow.now()
sub.next_bill_date = arrow.get(
request.form.get("next_bill_date"), "YYYY-MM-DD"
).date()
Session.commit()
execute_subscription_webhook(sub.user)
elif request.form.get("alert_name") == "subscription_cancelled":
subscription_id = request.form.get("subscription_id")
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
if sub:
# cancellation_effective_date should be the same as next_bill_date
LOG.w(
"Cancel subscription %s %s on %s, next bill date %s",
subscription_id,
sub.user,
request.form.get("cancellation_effective_date"),
sub.next_bill_date,
)
sub.event_time = arrow.now()
sub.cancelled = True
emit_user_audit_log(
user=sub.user,
action=UserAuditLogAction.SubscriptionCancelled,
message="Cancelled Paddle subscription",
)
Session.commit()
user = sub.user
send_email(
user.email,
"SimpleLogin - your subscription is canceled",
render(
"transactional/subscription-cancel.txt",
user=user,
end_date=request.form.get("cancellation_effective_date"),
),
)
execute_subscription_webhook(sub.user)
else:
# user might have deleted their account
LOG.i(f"Cancel non-exist subscription {subscription_id}")
return "OK"
elif request.form.get("alert_name") == "subscription_updated":
subscription_id = request.form.get("subscription_id")
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
if sub:
next_bill_date = request.form.get("next_bill_date")
if not next_bill_date:
paddle_callback.failed_payment(sub, subscription_id)
return "OK"
LOG.d(
"Update subscription %s %s on %s, next bill date %s",
subscription_id,
sub.user,
request.form.get("cancellation_effective_date"),
sub.next_bill_date,
)
if (
int(request.form.get("subscription_plan_id"))
== PADDLE_MONTHLY_PRODUCT_ID
):
plan = PlanEnum.monthly
else:
plan = PlanEnum.yearly
sub.cancel_url = request.form.get("cancel_url")
sub.update_url = request.form.get("update_url")
sub.event_time = arrow.now()
sub.next_bill_date = arrow.get(
request.form.get("next_bill_date"), "YYYY-MM-DD"
).date()
sub.plan = plan
# make sure to set the new plan as not-cancelled
sub.cancelled = False
emit_user_audit_log(
user=sub.user,
action=UserAuditLogAction.SubscriptionExtended,
message="Extended Paddle subscription",
)
Session.commit()
execute_subscription_webhook(sub.user)
else:
LOG.w(
f"update non-exist subscription {subscription_id}. {request.form}"
)
return "No such subscription", 400
elif request.form.get("alert_name") == "payment_refunded":
subscription_id = request.form.get("subscription_id")
LOG.d("Refund request for subscription %s", subscription_id)
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
if sub:
user = sub.user
Subscription.delete(sub.id)
emit_user_audit_log(
user=user,
action=UserAuditLogAction.SubscriptionCancelled,
message="Paddle subscription cancelled as user requested a refund",
)
Session.commit()
LOG.e("%s requests a refund", user)
execute_subscription_webhook(sub.user)
elif request.form.get("alert_name") == "subscription_payment_refunded":
subscription_id = request.form.get("subscription_id")
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
LOG.d(
"Handle subscription_payment_refunded for subscription %s",
subscription_id,
)
if not sub:
LOG.w(
"No such subscription for %s, payload %s",
subscription_id,
request.form,
)
return "No such subscription"
plan_id = int(request.form["subscription_plan_id"])
if request.form["refund_type"] == "full":
if plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
LOG.d("subtract 1 month from next_bill_date %s", sub.next_bill_date)
sub.next_bill_date = sub.next_bill_date - relativedelta(months=1)
LOG.d("next_bill_date is %s", sub.next_bill_date)
Session.commit()
elif plan_id in PADDLE_YEARLY_PRODUCT_IDS:
LOG.d("subtract 1 year from next_bill_date %s", sub.next_bill_date)
sub.next_bill_date = sub.next_bill_date - relativedelta(years=1)
LOG.d("next_bill_date is %s", sub.next_bill_date)
Session.commit()
else:
LOG.e("Unknown plan_id %s", plan_id)
else:
LOG.w("partial subscription_payment_refunded, not handled")
execute_subscription_webhook(sub.user)
return "OK"
@app.route("/paddle_coupon", methods=["GET", "POST"])
def paddle_coupon():
LOG.d("paddle coupon callback %s", request.form)
if not paddle_utils.verify_incoming_request(dict(request.form)):
LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
return "KO", 400
product_id = request.form.get("p_product_id")
if product_id != PADDLE_COUPON_ID:
LOG.e("product_id %s not match with %s", product_id, PADDLE_COUPON_ID)
return "KO", 400
email = request.form.get("email")
LOG.d("Paddle coupon request for %s", email)
coupon = Coupon.create(
code=random_string(30),
comment="For 1-year coupon",
expires_date=arrow.now().shift(years=1, days=-1),
commit=True,
)
return (
f"Your 1-year coupon is <b>{coupon.code}</b> <br> "
f"It's valid until <b>{coupon.expires_date.date().isoformat()}</b>"
)


@ -16,6 +16,7 @@ PROTON_ERROR_CODE_HV_NEEDED = 9001
PLAN_FREE = 1 PLAN_FREE = 1
PLAN_PREMIUM = 2 PLAN_PREMIUM = 2
PLAN_PREMIUM_LIFETIME = 3
@dataclass @dataclass
@ -112,10 +113,13 @@ class HttpProtonClient(ProtonClient):
        if plan_value == PLAN_FREE:
            plan = SLPlan(type=SLPlanType.Free, expiration=None)
        elif plan_value == PLAN_PREMIUM:
+            expiration = info.get("PlanExpiration", "1")
            plan = SLPlan(
                type=SLPlanType.Premium,
-                expiration=Arrow.fromtimestamp(info["PlanExpiration"], tzinfo="utc"),
+                expiration=Arrow.fromtimestamp(expiration, tzinfo="utc"),
            )
+        elif plan_value == PLAN_PREMIUM_LIFETIME:
+            plan = SLPlan(SLPlanType.PremiumLifetime, expiration=None)
else: else:
raise Exception(f"Invalid value for plan: {plan_value}") raise Exception(f"Invalid value for plan: {plan_value}")


@ -1,10 +1,8 @@
-from newrelic import agent
from typing import Optional
from app.db import Session
-from app.log import LOG
from app.errors import ProtonPartnerNotSetUp
-from app.models import Partner, PartnerUser, User
+from app.models import Partner
PROTON_PARTNER_NAME = "Proton"
_PROTON_PARTNER: Optional[Partner] = None
@ -23,15 +21,3 @@ def get_proton_partner() -> Partner:
def is_proton_partner(partner: Partner) -> bool: def is_proton_partner(partner: Partner) -> bool:
return partner.name == PROTON_PARTNER_NAME return partner.name == PROTON_PARTNER_NAME
def perform_proton_account_unlink(current_user: User):
proton_partner = get_proton_partner()
partner_user = PartnerUser.get_by(
user_id=current_user.id, partner_id=proton_partner.id
)
if partner_user is not None:
LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
PartnerUser.delete(partner_user.id)
Session.commit()
agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})


@ -0,0 +1,39 @@
from newrelic import agent
from app.db import Session
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserUnlinked
from app.log import LOG
from app.models import User, PartnerUser
from app.proton.proton_partner import get_proton_partner
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
def can_unlink_proton_account(user: User) -> bool:
return (user.flags & User.FLAG_CREATED_FROM_PARTNER) == 0
def perform_proton_account_unlink(
current_user: User, skip_check: bool = False
) -> None | str:
if not skip_check and not can_unlink_proton_account(current_user):
return None
proton_partner = get_proton_partner()
partner_user = PartnerUser.get_by(
user_id=current_user.id, partner_id=proton_partner.id
)
if partner_user is not None:
LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.UnlinkAccount,
message=f"User has unlinked the account (email={partner_user.partner_email} | external_user_id={partner_user.external_user_id})",
)
EventDispatcher.send_event(
partner_user.user, EventContent(user_unlinked=UserUnlinked())
)
PartnerUser.delete(partner_user.id)
external_user_id = partner_user.external_user_id
Session.commit()
agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
return external_user_id
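A sketch of the expected call pattern from a settings view; the wrapper function is illustrative, and the module path is an assumption since the new file's name was lost in this extract:

# NOTE: "app.proton.proton_unlink" is an assumed module path.
from app.proton.proton_unlink import (
    can_unlink_proton_account,
    perform_proton_account_unlink,
)


def unlink_if_allowed(current_user):
    # Accounts created from the partner (FLAG_CREATED_FROM_PARTNER) refuse to unlink.
    if not can_unlink_proton_account(current_user):
        return None
    # Returns the external_user_id of the unlinked partner account on success.
    return perform_proton_account_unlink(current_user)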

app/app/request_utils.py (new file)

@ -0,0 +1,6 @@
from random import randbytes
from base64 import b64encode
def generate_request_id() -> str:
return b64encode(randbytes(6)).decode()

app/app/sentry_utils.py (new file)

@ -0,0 +1,21 @@
from typing import Optional
from sentry_sdk.types import Event, Hint
_HTTP_CODES_TO_IGNORE = [416]
def _should_send(_event: Event, hint: Hint) -> bool:
# Check if this is an HTTP Exception event
if "exc_info" in hint:
exc_type, exc_value, exc_traceback = hint["exc_info"]
# Check if it's a Werkzeug HTTPException (raised for HTTP status codes)
if hasattr(exc_value, "code") and exc_value.code in _HTTP_CODES_TO_IGNORE:
return False
return True
def sentry_before_send(event: Event, hint: Hint) -> Optional[Event]:
if _should_send(event, hint):
return event
return None
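The filter is meant to be wired into the SDK at init time; a minimal sketch using the standard sentry_sdk entry point (the DSN is a placeholder):

import sentry_sdk

from app.sentry_utils import sentry_before_send

sentry_sdk.init(
    dsn="https://publickey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=sentry_before_send,  # drop events for ignored HTTP codes (416)
)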


@ -1,6 +1,7 @@
"""Inspired from """Inspired from
https://github.com/petermat/spamassassin_client https://github.com/petermat/spamassassin_client
""" """
import logging import logging
import socket import socket
from io import BytesIO from io import BytesIO


@ -1,38 +1,16 @@
-import requests
-from requests import RequestException
-from app import config
+from app.db import Session
from app.events.event_dispatcher import EventDispatcher from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User from app.models import User
def execute_subscription_webhook(user: User): def execute_subscription_webhook(user: User):
webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
if webhook_url is None:
return
subscription_end = user.get_active_subscription_end( subscription_end = user.get_active_subscription_end(
include_partner_subscription=False include_partner_subscription=False
) )
sl_subscription_end = None sl_subscription_end = None
if subscription_end: if subscription_end:
sl_subscription_end = subscription_end.timestamp sl_subscription_end = subscription_end.timestamp
payload = {
"user_id": user.id,
"is_premium": user.is_premium(),
"active_subscription_end": sl_subscription_end,
}
try:
response = requests.post(webhook_url, json=payload, timeout=2)
if response.status_code == 200:
LOG.i("Sent request to subscription update webhook successfully")
else:
LOG.i(
f"Request to webhook failed with statue {response.status_code}: {response.text}"
)
except RequestException as e:
LOG.error(f"Subscription request exception: {e}")
event = UserPlanChanged(plan_end_time=sl_subscription_end) event = UserPlanChanged(plan_end_time=sl_subscription_end)
EventDispatcher.send_event(user, EventContent(user_plan_change=event)) EventDispatcher.send_event(user, EventContent(user_plan_change=event))
Session.commit()


@ -0,0 +1,44 @@
from enum import Enum
from app.models import User, UserAuditLog
class UserAuditLogAction(Enum):
CreateUser = "create_user"
ActivateUser = "activate_user"
ResetPassword = "reset_password"
Upgrade = "upgrade"
SubscriptionExtended = "subscription_extended"
SubscriptionCancelled = "subscription_cancelled"
LinkAccount = "link_account"
UnlinkAccount = "unlink_account"
CreateMailbox = "create_mailbox"
VerifyMailbox = "verify_mailbox"
UpdateMailbox = "update_mailbox"
DeleteMailbox = "delete_mailbox"
CreateCustomDomain = "create_custom_domain"
VerifyCustomDomain = "verify_custom_domain"
UpdateCustomDomain = "update_custom_domain"
DeleteCustomDomain = "delete_custom_domain"
CreateDirectory = "create_directory"
UpdateDirectory = "update_directory"
DeleteDirectory = "delete_directory"
UserMarkedForDeletion = "user_marked_for_deletion"
DeleteUser = "delete_user"
def emit_user_audit_log(
user: User, action: UserAuditLogAction, message: str, commit: bool = False
):
UserAuditLog.create(
user_id=user.id,
user_email=user.email,
action=action.value,
message=message,
commit=commit,
)
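Usage mirrors the call sites added throughout this diff; a minimal sketch with placeholder values:

from app.models import User
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

user = User.get(42)  # placeholder id
emit_user_audit_log(
    user=user,
    action=UserAuditLogAction.UpdateMailbox,
    message="Set mailbox 7 (me@example.com) as default",  # free-form text
    commit=True,
)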

app/app/user_settings.py (new file)

@ -0,0 +1,78 @@
from typing import Optional
from app.db import Session
from app.log import LOG
from app.models import User, SLDomain, CustomDomain, Mailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
class CannotSetAlias(Exception):
def __init__(self, msg: str):
self.msg = msg
class CannotSetMailbox(Exception):
def __init__(self, msg: str):
self.msg = msg
def set_default_alias_domain(user: User, domain_name: Optional[str]):
if not domain_name:
LOG.i(f"User {user} has set no domain as default domain")
user.default_alias_public_domain_id = None
user.default_alias_custom_domain_id = None
Session.flush()
return
sl_domain: SLDomain = SLDomain.get_by(domain=domain_name)
if sl_domain:
if sl_domain.hidden:
LOG.i(f"User {user} has tried to set up a hidden domain as default domain")
raise CannotSetAlias("Domain does not exist")
if sl_domain.premium_only and not user.is_premium():
LOG.i(f"User {user} has tried to set up a premium domain as default domain")
raise CannotSetAlias("You cannot use this domain")
LOG.i(f"User {user} has set public {sl_domain} as default domain")
user.default_alias_public_domain_id = sl_domain.id
user.default_alias_custom_domain_id = None
Session.flush()
return
custom_domain = CustomDomain.get_by(domain=domain_name)
if not custom_domain:
LOG.i(
f"User {user} has tried to set up an non existing domain as default domain"
)
raise CannotSetAlias("Domain does not exist or it hasn't been verified")
if custom_domain.user_id != user.id or not custom_domain.verified:
LOG.i(
f"User {user} has tried to set domain {custom_domain} as default domain that does not belong to the user or that is not verified"
)
raise CannotSetAlias("Domain does not exist or it hasn't been verified")
LOG.i(f"User {user} has set custom {custom_domain} as default domain")
user.default_alias_public_domain_id = None
user.default_alias_custom_domain_id = custom_domain.id
Session.flush()
def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
raise CannotSetMailbox("Invalid mailbox")
if not mailbox.verified:
raise CannotSetMailbox("This is mailbox is not verified")
if mailbox.id == user.default_mailbox_id:
return mailbox
LOG.i(f"User {user} has set mailbox {mailbox} as his default one")
user.default_mailbox_id = mailbox.id
emit_user_audit_log(
user=user,
action=UserAuditLogAction.UpdateMailbox,
message=f"Set mailbox {mailbox.id} ({mailbox.email}) as default",
)
Session.commit()
return mailbox
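Callers are expected to catch the two exception types; a sketch of how an endpoint might wrap these helpers (the wrapper and response shape are illustrative, not routes from this diff):

from app.user_settings import (
    CannotSetAlias,
    CannotSetMailbox,
    set_default_alias_domain,
    set_default_mailbox,
)


def update_settings(user, domain_name, mailbox_id):
    try:
        set_default_alias_domain(user, domain_name)
        set_default_mailbox(user, mailbox_id)
    except (CannotSetAlias, CannotSetMailbox) as e:
        return {"error": e.msg}, 400
    return {"ok": True}, 200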


@ -1,4 +1,3 @@
import random
import re import re
import secrets import secrets
import string import string
@ -32,8 +31,9 @@ def random_words(words: int = 2, numbers: int = 0):
fields = [secrets.choice(_words) for i in range(words)] fields = [secrets.choice(_words) for i in range(words)]
if numbers > 0: if numbers > 0:
digits = "".join([str(random.randint(0, 9)) for i in range(numbers)]) digits = [n for n in range(10)]
return "_".join(fields) + digits suffix = "".join([str(secrets.choice(digits)) for i in range(numbers)])
return "_".join(fields) + suffix
else: else:
return "_".join(fields) return "_".join(fields)


@ -14,8 +14,9 @@ from sqlalchemy.sql import Insert, text
from app import s3, config from app import s3, config
from app.alias_utils import nb_email_log_for_mailbox from app.alias_utils import nb_email_log_for_mailbox
from app.api.views.apple import verify_receipt from app.api.views.apple import verify_receipt
from app.custom_domain_validation import CustomDomainValidation, is_mx_equivalent
from app.db import Session from app.db import Session
-from app.dns_utils import get_mx_domains, is_mx_equivalent
+from app.dns_utils import get_mx_domains
from app.email_utils import ( from app.email_utils import (
send_email, send_email,
send_trial_end_soon_email, send_trial_end_soon_email,
@ -58,9 +59,12 @@ from app.models import (
ApiToCookieToken, ApiToCookieToken,
) )
from app.pgp_utils import load_public_key_and_check, PGPException from app.pgp_utils import load_public_key_and_check, PGPException
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email from app.utils import sanitize_email
from server import create_light_app from server import create_light_app
from tasks.clean_alias_audit_log import cleanup_alias_audit_log
from tasks.clean_user_audit_log import cleanup_user_audit_log
from tasks.cleanup_old_imports import cleanup_old_imports from tasks.cleanup_old_imports import cleanup_old_imports
from tasks.cleanup_old_jobs import cleanup_old_jobs from tasks.cleanup_old_jobs import cleanup_old_jobs
from tasks.cleanup_old_notifications import cleanup_old_notifications from tasks.cleanup_old_notifications import cleanup_old_notifications
@ -282,8 +286,16 @@ def notify_manual_sub_end():
def poll_apple_subscription(): def poll_apple_subscription():
"""Poll Apple API to update AppleSubscription""" """Poll Apple API to update AppleSubscription"""
-    # todo: only near the end of the subscription
-    for apple_sub in AppleSubscription.all():
+    for apple_sub in (
+        AppleSubscription.filter(
+            AppleSubscription.expires_date < arrow.now().shift(days=15)
+        )
+        .enable_eagerloads(False)
+        .yield_per(100)
+    ):
+        if not apple_sub.is_valid():
+            # Subscription is not valid anymore and hasn't been renewed
+            continue
if not apple_sub.product_id: if not apple_sub.product_id:
LOG.d("Ignore %s", apple_sub) LOG.d("Ignore %s", apple_sub)
continue continue
@ -896,6 +908,24 @@ def check_mailbox_valid_pgp_keys():
def check_custom_domain(): def check_custom_domain():
# Delete custom domains that haven't been verified in a month
for custom_domain in (
CustomDomain.filter(
CustomDomain.verified == False, # noqa: E712
CustomDomain.created_at < arrow.now().shift(months=-1),
)
.enable_eagerloads(False)
.yield_per(100)
):
alias_count = Alias.filter(Alias.custom_domain_id == custom_domain.id).count()
if alias_count > 0:
LOG.warn(
f"Custom Domain {custom_domain} has {alias_count} aliases. Won't delete"
)
else:
LOG.i(f"Deleting unverified old custom domain {custom_domain}")
CustomDomain.delete(custom_domain.id)
LOG.d("Check verified domain for DNS issues") LOG.d("Check verified domain for DNS issues")
for custom_domain in CustomDomain.filter_by(verified=True): # type: CustomDomain for custom_domain in CustomDomain.filter_by(verified=True): # type: CustomDomain
@ -905,9 +935,11 @@ def check_custom_domain():
LOG.i("custom domain has been deleted") LOG.i("custom domain has been deleted")
-def check_single_custom_domain(custom_domain):
+def check_single_custom_domain(custom_domain: CustomDomain):
    mx_domains = get_mx_domains(custom_domain.domain)
-    if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
+    validator = CustomDomainValidation(dkim_domain=config.EMAIL_DOMAIN)
+    expected_custom_domains = validator.get_expected_mx_records(custom_domain)
+    if not is_mx_equivalent(mx_domains, expected_custom_domains):
user = custom_domain.user user = custom_domain.user
LOG.w( LOG.w(
"The MX record is not correctly set for %s %s %s", "The MX record is not correctly set for %s %s %s",
@ -965,7 +997,7 @@ def delete_expired_tokens():
LOG.d("Delete api to cookie tokens older than %s, nb row %s", max_time, nb_row) LOG.d("Delete api to cookie tokens older than %s, nb row %s", max_time, nb_row)
-async def _hibp_check(api_key, queue):
+async def _hibp_check(api_key: str, queue: asyncio.Queue):
""" """
Uses a single API key to check the queue as fast as possible. Uses a single API key to check the queue as fast as possible.
@ -984,11 +1016,16 @@ async def _hibp_check(api_key, queue):
if not alias: if not alias:
continue continue
user = alias.user user = alias.user
-        if user.disabled or not user.is_paid():
+        if user.disabled or not user.is_premium():
# Mark it as hibp done to skip it as if it had been checked # Mark it as hibp done to skip it as if it had been checked
alias.hibp_last_check = arrow.utcnow() alias.hibp_last_check = arrow.utcnow()
Session.commit() Session.commit()
continue continue
if alias.flags & Alias.FLAG_PARTNER_CREATED > 0:
# Mark as hibp done
alias.hibp_last_check = arrow.utcnow()
Session.commit()
continue
LOG.d("Checking HIBP for %s", alias) LOG.d("Checking HIBP for %s", alias)
@ -1215,7 +1252,7 @@ def notify_hibp():
def clear_users_scheduled_to_be_deleted(dry_run=False): def clear_users_scheduled_to_be_deleted(dry_run=False):
-    users = User.filter(
+    users: List[User] = User.filter(
and_( and_(
User.delete_on.isnot(None), User.delete_on.isnot(None),
User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS), User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
@ -1227,6 +1264,11 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
) )
if dry_run: if dry_run:
continue continue
emit_user_audit_log(
user=user,
action=UserAuditLogAction.DeleteUser,
message=f"Delete user {user.id} ({user.email})",
)
User.delete(user.id) User.delete(user.id)
Session.commit() Session.commit()
@ -1238,6 +1280,16 @@ def delete_old_data():
cleanup_old_notifications(oldest_valid) cleanup_old_notifications(oldest_valid)
def clear_alias_audit_log():
oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
cleanup_alias_audit_log(oldest_valid)
def clear_user_audit_log():
oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
cleanup_user_audit_log(oldest_valid)
if __name__ == "__main__": if __name__ == "__main__":
LOG.d("Start running cronjob") LOG.d("Start running cronjob")
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
@ -1246,22 +1298,6 @@ if __name__ == "__main__":
"--job", "--job",
help="Choose a cron job to run", help="Choose a cron job to run",
type=str, type=str,
choices=[
"stats",
"notify_trial_end",
"notify_manual_subscription_end",
"notify_premium_end",
"delete_logs",
"delete_old_data",
"poll_apple_subscription",
"sanity_check",
"delete_old_monitoring",
"check_custom_domain",
"check_hibp",
"notify_hibp",
"cleanup_tokens",
"send_undelivered_mails",
],
) )
args = parser.parse_args() args = parser.parse_args()
# wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc # wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc
@ -1310,4 +1346,10 @@ if __name__ == "__main__":
load_unsent_mails_from_fs_and_resend() load_unsent_mails_from_fs_and_resend()
elif args.job == "delete_scheduled_users": elif args.job == "delete_scheduled_users":
LOG.d("Deleting users scheduled to be deleted") LOG.d("Deleting users scheduled to be deleted")
-        clear_users_scheduled_to_be_deleted(dry_run=True)
+        clear_users_scheduled_to_be_deleted()
elif args.job == "clear_alias_audit_log":
LOG.d("Clearing alias audit log")
clear_alias_audit_log()
elif args.job == "clear_user_audit_log":
LOG.d("Clearing user audit log")
clear_user_audit_log()


@ -14,15 +14,28 @@ jobs:
- name: SimpleLogin Custom Domain check - name: SimpleLogin Custom Domain check
command: python /code/cron.py -j check_custom_domain command: python /code/cron.py -j check_custom_domain
shell: /bin/bash shell: /bin/bash
schedule: "15 2 * * *" schedule: "15 */4 * * *"
captureStderr: true captureStderr: true
concurrencyPolicy: Forbid
onFailure:
retry:
maximumRetries: 10
initialDelay: 1
maximumDelay: 30
backoffMultiplier: 2
- name: SimpleLogin HIBP check - name: SimpleLogin HIBP check
command: python /code/cron.py -j check_hibp command: python /code/cron.py -j check_hibp
shell: /bin/bash shell: /bin/bash
schedule: "15 3 * * *" schedule: "13 */4 * * *"
captureStderr: true captureStderr: true
concurrencyPolicy: Forbid concurrencyPolicy: Forbid
onFailure:
retry:
maximumRetries: 10
initialDelay: 1
maximumDelay: 30
backoffMultiplier: 2
- name: SimpleLogin Notify HIBP breaches - name: SimpleLogin Notify HIBP breaches
command: python /code/cron.py -j notify_hibp command: python /code/cron.py -j notify_hibp
@ -31,6 +44,7 @@ jobs:
captureStderr: true captureStderr: true
concurrencyPolicy: Forbid concurrencyPolicy: Forbid
- name: SimpleLogin Delete Logs - name: SimpleLogin Delete Logs
command: python /code/cron.py -j delete_logs command: python /code/cron.py -j delete_logs
shell: /bin/bash shell: /bin/bash
@ -80,3 +94,17 @@ jobs:
schedule: "*/5 * * * *" schedule: "*/5 * * * *"
captureStderr: true captureStderr: true
concurrencyPolicy: Forbid concurrencyPolicy: Forbid
- name: SimpleLogin clear alias_audit_log old entries
command: python /code/cron.py -j clear_alias_audit_log
shell: /bin/bash
schedule: "0 * * * *" # Once every hour
captureStderr: true
concurrencyPolicy: Forbid
- name: SimpleLogin clear user_audit_log old entries
command: python /code/cron.py -j clear_user_audit_log
shell: /bin/bash
schedule: "0 * * * *" # Once every hour
captureStderr: true
concurrencyPolicy: Forbid


@ -369,8 +369,8 @@ For ex:
"is_premium": false "is_premium": false
}, },
{ {
"signed_suffix": ".yeah@sl.local.X6_7OQ.i8XL4xsMsn7dxDEWU8eF-Zap0qo", "signed_suffix": ".yeah@sl.lan.X6_7OQ.i8XL4xsMsn7dxDEWU8eF-Zap0qo",
"suffix": ".yeah@sl.local", "suffix": ".yeah@sl.lan",
"is_custom": true, "is_custom": true,
"is_premium": false "is_premium": false
} }
@ -465,7 +465,7 @@ Here's an example:
{ {
"creation_date": "2020-04-06 17:57:14+00:00", "creation_date": "2020-04-06 17:57:14+00:00",
"creation_timestamp": 1586195834, "creation_timestamp": 1586195834,
"email": "prefix1.cat@sl.local", "email": "prefix1.cat@sl.lan",
"name": "A Name", "name": "A Name",
"enabled": true, "enabled": true,
"id": 3, "id": 3,
@ -518,7 +518,7 @@ Alias info, use the same format as in /api/v2/aliases. For example:
{ {
"creation_date": "2020-04-06 17:57:14+00:00", "creation_date": "2020-04-06 17:57:14+00:00",
"creation_timestamp": 1586195834, "creation_timestamp": 1586195834,
"email": "prefix1.cat@sl.local", "email": "prefix1.cat@sl.lan",
"name": "A Name", "name": "A Name",
"enabled": true, "enabled": true,
"id": 3, "id": 3,
@ -608,7 +608,7 @@ If success, 200 with the list of activities, for example:
"activities": [ "activities": [
{ {
"action": "reply", "action": "reply",
"from": "yes_meo_chat@sl.local", "from": "yes_meo_chat@sl.lan",
"timestamp": 1580903760, "timestamp": 1580903760,
"to": "marketing@example.com", "to": "marketing@example.com",
"reverse_alias": "\"marketing at example.com\" <reply@a.b>", "reverse_alias": "\"marketing at example.com\" <reply@a.b>",
@ -703,7 +703,7 @@ Return 200 and `existed=true` if contact is already added.
"creation_timestamp": 1584186761, "creation_timestamp": 1584186761,
"last_email_sent_date": null, "last_email_sent_date": null,
"last_email_sent_timestamp": null, "last_email_sent_timestamp": null,
"reverse_alias": "First Last first@example.com <ra+qytyzjhrumrreuszrbjxqjlkh@sl.local>", "reverse_alias": "First Last first@example.com <ra+qytyzjhrumrreuszrbjxqjlkh@sl.lan>",
"reverse_alias_address": "reply+bzvpazcdedcgcpztehxzgjgzmxskqa@sl.co", "reverse_alias_address": "reply+bzvpazcdedcgcpztehxzgjgzmxskqa@sl.co",
"existed": false "existed": false
} }
@ -992,7 +992,7 @@ Return user setting.
{ {
"alias_generator": "word", "alias_generator": "word",
"notification": true, "notification": true,
"random_alias_default_domain": "sl.local", "random_alias_default_domain": "sl.lan",
"sender_format": "AT", "sender_format": "AT",
"random_alias_suffix": "random_string" "random_alias_suffix": "random_string"
} }
@ -1029,7 +1029,7 @@ Return domains that user can use to create random alias
"is_custom": false "is_custom": false
}, },
{ {
"domain": "sl.local", "domain": "sl.lan",
"is_custom": false "is_custom": false
}, },
{ {


@ -30,6 +30,7 @@ It should contain the following info:
""" """
import argparse import argparse
import email import email
import time import time
@ -52,8 +53,12 @@ from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress from flanker.addresslib.address import EmailAddress
from sqlalchemy.exc import IntegrityError from sqlalchemy.exc import IntegrityError
-from app import pgp_utils, s3, config
-from app.alias_utils import try_auto_create, change_alias_status
+from app import pgp_utils, s3, config, contact_utils
+from app.alias_utils import (
+    try_auto_create,
+    change_alias_status,
+    get_alias_recipient_name,
+)
from app.config import ( from app.config import (
EMAIL_DOMAIN, EMAIL_DOMAIN,
URL, URL,
@ -145,6 +150,7 @@ from app.handler.unsubscribe_generator import UnsubscribeGenerator
from app.handler.unsubscribe_handler import UnsubscribeHandler from app.handler.unsubscribe_handler import UnsubscribeHandler
from app.log import LOG, set_message_id from app.log import LOG, set_message_id
from app.mail_sender import sl_sendmail from app.mail_sender import sl_sendmail
from app.mailbox_utils import get_mailbox_for_reply_phase
from app.message_utils import message_to_bytes from app.message_utils import message_to_bytes
from app.models import ( from app.models import (
Alias, Alias,
@ -162,18 +168,21 @@ from app.models import (
VerpType, VerpType,
SLDomain, SLDomain,
) )
from app.monitor_utils import send_version_event
from app.pgp_utils import ( from app.pgp_utils import (
PGPException, PGPException,
sign_data_with_pgpy, sign_data_with_pgpy,
sign_data, sign_data,
load_public_key_and_check, load_public_key_and_check,
) )
-from app.utils import sanitize_email, canonicalize_email
+from app.utils import sanitize_email
from init_app import load_pgp_public_keys from init_app import load_pgp_public_keys
from server import create_light_app from server import create_light_app
-def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Contact:
+def get_or_create_contact(
+    from_header: str, mail_from: str, alias: Alias
+) -> Optional[Contact]:
    """
    contact_from_header is the RFC 2047 format FROM header
    """
@ -195,80 +204,18 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
            mail_from,
        )
        contact_email = mail_from
+    contact_result = contact_utils.create_contact(
+        email=contact_email,
+        alias=alias,
+        name=contact_name,
+        mail_from=mail_from,
+        allow_empty_email=True,
+        automatic_created=True,
+        from_partner=False,
+    )
+    if contact_result.error:
+        LOG.w(f"Error creating contact: {contact_result.error.value}")
+    return contact_result.contact
-    if not is_valid_email(contact_email):
-        LOG.w(
-            "invalid contact email %s. Parse from %s %s",
-            contact_email,
-            from_header,
-            mail_from,
-        )
-        # either reuse a contact with empty email or create a new contact with empty email
-        contact_email = ""
-    contact_email = sanitize_email(contact_email, not_lower=True)
if contact_name and "\x00" in contact_name:
LOG.w("issue with contact name %s", contact_name)
contact_name = ""
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
if contact:
if contact.name != contact_name:
LOG.d(
"Update contact %s name %s to %s",
contact,
contact.name,
contact_name,
)
contact.name = contact_name
Session.commit()
# contact created in the past does not have mail_from and from_header field
if not contact.mail_from and mail_from:
LOG.d(
"Set contact mail_from %s: %s to %s",
contact,
contact.mail_from,
mail_from,
)
contact.mail_from = mail_from
Session.commit()
else:
alias_id = alias.id
try:
contact_email_for_reply = (
contact_email if is_valid_email(contact_email) else ""
)
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias_id,
website_email=contact_email,
name=contact_name,
mail_from=mail_from,
reply_email=generate_reply_email(contact_email_for_reply, alias),
automatic_created=True,
)
if not contact_email:
LOG.d("Create a contact with invalid email for %s", alias)
contact.invalid_email = True
LOG.d(
"create contact %s for %s, reverse alias:%s",
contact_email,
alias,
contact.reply_email,
)
Session.commit()
except IntegrityError:
# No need to manually rollback, as IntegrityError already rolls back
LOG.info(
f"Contact with email {contact_email} for alias_id {alias_id} already existed, fetching from DB"
)
contact = Contact.get_by(alias_id=alias_id, website_email=contact_email)
return contact
def get_or_create_reply_to_contact( def get_or_create_reply_to_contact(
@ -293,33 +240,7 @@ def get_or_create_reply_to_contact(
) )
return None return None
+    return contact_utils.create_contact(contact_address, alias, contact_name).contact
-    contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
if contact:
return contact
else:
LOG.d(
"create contact %s for alias %s via reply-to header %s",
contact_address,
alias,
reply_to_header,
)
try:
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_address,
name=contact_name,
reply_email=generate_reply_email(contact_address, alias),
automatic_created=True,
)
Session.commit()
except IntegrityError:
LOG.w("Contact %s %s already exist", alias, contact_address)
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
return contact
def replace_header_when_forward(msg: Message, alias: Alias, header: str): def replace_header_when_forward(msg: Message, alias: Alias, header: str):
@ -644,7 +565,7 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
if not user.is_active(): if not user.is_active():
LOG.w(f"User {user} has been soft deleted") LOG.w(f"User {user} has been soft deleted")
-        return False, status.E502
+        return [(False, status.E502)]
if not user.can_send_or_receive(): if not user.can_send_or_receive():
LOG.i(f"User {user} cannot receive emails") LOG.i(f"User {user} cannot receive emails")
@ -665,19 +586,48 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
from_header = get_header_unicode(msg[headers.FROM]) from_header = get_header_unicode(msg[headers.FROM])
LOG.d("Create or get contact for from_header:%s", from_header) LOG.d("Create or get contact for from_header:%s", from_header)
contact = get_or_create_contact(from_header, envelope.mail_from, alias) contact = get_or_create_contact(from_header, envelope.mail_from, alias)
if not contact:
return [(False, status.E504)]
alias = ( alias = (
contact.alias contact.alias
) # In case the Session was closed in the get_or_create we re-fetch the alias ) # In case the Session was closed in the get_or_create we re-fetch the alias
-    reply_to_contact = None
-    if msg[headers.REPLY_TO]:
-        reply_to = get_header_unicode(msg[headers.REPLY_TO])
-        LOG.d("Create or get contact for reply_to_header:%s", reply_to)
-        # ignore when reply-to = alias
-        if reply_to == alias.email:
-            LOG.i("Reply-to same as alias %s", alias)
-        else:
-            reply_to_contact = get_or_create_reply_to_contact(reply_to, alias, msg)
+    reply_to_contact = []
+    if msg[headers.REPLY_TO]:
+        reply_to_header_contents = get_header_unicode(msg[headers.REPLY_TO])
+        if reply_to_header_contents:
+            LOG.d(
+                "Create or get contact for reply_to_header:%s", reply_to_header_contents
+            )
+            for reply_to in [
+                reply_to.strip()
+                for reply_to in reply_to_header_contents.split(",")
+                if reply_to.strip()
+            ]:
+                try:
+                    reply_to_name, reply_to_email = parse_full_address(reply_to)
+                except ValueError:
+                    LOG.d(f"Could not parse reply-to address {reply_to}")
+                    continue
+                if reply_to_email == alias.email:
+                    LOG.i("Reply-to same as alias %s", alias)
+                else:
+                    reply_contact = get_or_create_reply_to_contact(
+                        reply_to_email, alias, msg
+                    )
+                    if reply_contact:
+                        reply_to_contact.append(reply_contact)
if alias.user.delete_on is not None:
LOG.d(f"user {user} is pending to be deleted. Do not forward")
EmailLog.create(
contact_id=contact.id,
user_id=contact.user_id,
blocked=True,
alias_id=contact.alias_id,
commit=True,
)
return [(True, status.E502)]
if not alias.enabled or contact.block_forward: if not alias.enabled or contact.block_forward:
LOG.d("%s is disabled, do not forward", alias) LOG.d("%s is disabled, do not forward", alias)
@ -769,7 +719,7 @@ def forward_email_to_mailbox(
envelope, envelope,
mailbox, mailbox,
user, user,
-    reply_to_contact: Optional[Contact],
+    reply_to_contacts: list[Contact],
) -> (bool, str): ) -> (bool, str):
LOG.d("Forward %s -> %s -> %s", contact, alias, mailbox) LOG.d("Forward %s -> %s -> %s", contact, alias, mailbox)
@ -817,7 +767,7 @@ def forward_email_to_mailbox(
email_log = EmailLog.create( email_log = EmailLog.create(
contact_id=contact.id, contact_id=contact.id,
-        user_id=user.id,
+        user_id=contact.user_id,
mailbox_id=mailbox.id, mailbox_id=mailbox.id,
alias_id=contact.alias_id, alias_id=contact.alias_id,
message_id=str(msg[headers.MESSAGE_ID]), message_id=str(msg[headers.MESSAGE_ID]),
@ -952,11 +902,13 @@ def forward_email_to_mailbox(
add_or_replace_header(msg, "From", new_from_header) add_or_replace_header(msg, "From", new_from_header)
LOG.d("From header, new:%s, old:%s", new_from_header, old_from_header) LOG.d("From header, new:%s, old:%s", new_from_header, old_from_header)
-    if reply_to_contact:
-        reply_to_header = msg[headers.REPLY_TO]
-        new_reply_to_header = reply_to_contact.new_addr()
+    if len(reply_to_contacts) > 0:
+        original_reply_to = get_header_unicode(msg[headers.REPLY_TO])
+        new_reply_to_header = ", ".join(
+            [reply_to_contact.new_addr() for reply_to_contact in reply_to_contacts][:5]
+        )
        add_or_replace_header(msg, "Reply-To", new_reply_to_header)
-        LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, reply_to_header)
+        LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, original_reply_to)
# replace CC & To emails by reverse-alias for all emails that are not alias # replace CC & To emails by reverse-alias for all emails that are not alias
try: try:
@ -1088,7 +1040,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
return False, status.E503 return False, status.E503
user = alias.user user = alias.user
mail_from = envelope.mail_from
if not user.can_send_or_receive(): if not user.can_send_or_receive():
LOG.i(f"User {user} cannot send emails") LOG.i(f"User {user} cannot send emails")
@ -1102,13 +1053,15 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
return False, dmarc_delivery_status return False, dmarc_delivery_status
# Anti-spoofing # Anti-spoofing
-    mailbox = get_mailbox_from_mail_from(mail_from, alias)
+    mailbox = get_mailbox_for_reply_phase(
+        envelope.mail_from, get_header_unicode(msg[headers.FROM]), alias
+    )
    if not mailbox:
        if alias.disable_email_spoofing_check:
            # ignore this error, use default alias mailbox
            LOG.w(
                "ignore unknown sender to reverse-alias %s: %s -> %s",
-                mail_from,
+                envelope.mail_from,
                alias,
                contact,
            )
@ -1251,23 +1204,11 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
Session.commit() Session.commit()
-    # make the email comes from alias
-    from_header = alias.email
-    # add alias name from alias
-    if alias.name:
-        LOG.d("Put alias name %s in from header", alias.name)
-        from_header = sl_formataddr((alias.name, alias.email))
-    elif alias.custom_domain:
-        # add alias name from domain
-        if alias.custom_domain.name:
-            LOG.d(
-                "Put domain default alias name %s in from header",
-                alias.custom_domain.name,
-            )
-            from_header = sl_formataddr((alias.custom_domain.name, alias.email))
-    LOG.d("From header is %s", from_header)
-    add_or_replace_header(msg, headers.FROM, from_header)
+    recipient_name = get_alias_recipient_name(alias)
+    if recipient_name.message:
+        LOG.d(recipient_name.message)
+    LOG.d("From header is %s", recipient_name.name)
+    add_or_replace_header(msg, headers.FROM, recipient_name.name)
try: try:
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;": if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
@ -1459,32 +1400,6 @@ def replace_original_message_id(alias: Alias, email_log: EmailLog, msg: Message)
msg[headers.REFERENCES] = " ".join(new_message_ids) msg[headers.REFERENCES] = " ".join(new_message_ids)
def get_mailbox_from_mail_from(mail_from: str, alias) -> Optional[Mailbox]:
"""return the corresponding mailbox given the mail_from and alias
Usually the mail_from=mailbox.email but it can also be one of the authorized address
"""
def __check(email_address: str, alias: Alias) -> Optional[Mailbox]:
for mailbox in alias.mailboxes:
if mailbox.email == email_address:
return mailbox
for authorized_address in mailbox.authorized_addresses:
if authorized_address.email == email_address:
LOG.d(
"Found an authorized address for %s %s %s",
alias,
mailbox,
authorized_address,
)
return mailbox
return None
# We need to first check for the uncanonicalized version because we still have users in the db with the
# email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
return __check(mail_from, alias) or __check(canonicalize_email(mail_from), alias)
def handle_unknown_mailbox( def handle_unknown_mailbox(
envelope, msg, reply_email: str, user: User, alias: Alias, contact: Contact envelope, msg, reply_email: str, user: User, alias: Alias, contact: Contact
): ):
@ -1600,7 +1515,9 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
LOG.w( LOG.w(
f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}" f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
) )
change_alias_status(alias, enabled=False) change_alias_status(
alias, enabled=False, message=f"Set enabled=False due to {reason}"
)
Notification.create( Notification.create(
user_id=user.id, user_id=user.id,
@ -1752,7 +1669,7 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
) )
Notification.create( Notification.create(
user_id=user.id, user_id=user.id,
title=f"Email cannot be sent to { contact.email } from your alias { alias.email }", title=f"Email cannot be sent to {contact.email} from your alias {alias.email}",
message=Notification.render( message=Notification.render(
"notification/bounce-reply-phase.html", "notification/bounce-reply-phase.html",
alias=alias, alias=alias,
@ -1765,7 +1682,7 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
user, user,
ALERT_BOUNCE_EMAIL_REPLY_PHASE, ALERT_BOUNCE_EMAIL_REPLY_PHASE,
mailbox.email, mailbox.email,
f"Email cannot be sent to { contact.email } from your alias { alias.email }", f"Email cannot be sent to {contact.email} from your alias {alias.email}",
render( render(
"transactional/bounce/bounce-email-reply-phase.txt", "transactional/bounce/bounce-email-reply-phase.txt",
user=user, user=user,
@ -2445,6 +2362,7 @@ class MailHandler:
"Custom/nb_rcpt_tos", len(envelope.rcpt_tos) "Custom/nb_rcpt_tos", len(envelope.rcpt_tos)
) )
send_version_event("email_handler")
with create_light_app().app_context(): with create_light_app().app_context():
return_status = handle(envelope, msg) return_status = handle(envelope, msg)
elapsed = time.time() - start elapsed = time.time() - start
@ -2480,6 +2398,7 @@ def main(port: int):
controller.start() controller.start()
LOG.d("Start mail controller %s %s", controller.hostname, controller.port) LOG.d("Start mail controller %s %s", controller.hostname, controller.port)
send_version_event("email_handler")
if LOAD_PGP_EMAIL_HANDLER: if LOAD_PGP_EMAIL_HANDLER:
LOG.w("LOAD PGP keys") LOG.w("LOAD PGP keys")

View File

@@ -2,12 +2,16 @@ import argparse
 from enum import Enum
 from sys import argv, exit

-from app.config import DB_URI
+from app.config import EVENT_LISTENER_DB_URI
 from app.log import LOG
+from app.monitor_utils import send_version_event
+from events import event_debugger
 from events.runner import Runner
 from events.event_source import DeadLetterEventSource, PostgresEventSource
 from events.event_sink import ConsoleEventSink, HttpEventSink

+_DEFAULT_MAX_RETRIES = 10
+

 class Mode(Enum):
     DEAD_LETTER = "dead_letter"
@@ -23,13 +27,15 @@ class Mode(Enum):
             raise ValueError(f"Invalid mode: {value}")


-def main(mode: Mode, dry_run: bool):
+def main(mode: Mode, dry_run: bool, max_retries: int):
     if mode == Mode.DEAD_LETTER:
         LOG.i("Using DeadLetterEventSource")
-        source = DeadLetterEventSource()
+        source = DeadLetterEventSource(max_retries)
+        service_name = "event_listener_dead_letter"
     elif mode == Mode.LISTENER:
         LOG.i("Using PostgresEventSource")
-        source = PostgresEventSource(DB_URI)
+        source = PostgresEventSource(EVENT_LISTENER_DB_URI)
+        service_name = "event_listener"
     else:
         raise ValueError(f"Invalid mode: {mode}")
@@ -40,25 +46,72 @@ def main(mode: Mode, dry_run: bool):
         LOG.i("Starting with HttpEventSink")
         sink = HttpEventSink()

-    runner = Runner(source=source, sink=sink)
+    send_version_event(service_name)
+    runner = Runner(source=source, sink=sink, service_name=service_name)
     runner.run()


+def debug_event(event_id: str):
+    LOG.i(f"Debugging event {event_id}")
+    try:
+        event_id_int = int(event_id)
+    except ValueError:
+        raise ValueError(f"Invalid event id: {event_id}")
+    event_debugger.debug_event(event_id_int)
+
+
+def run_event(event_id: str, delete_on_success: bool):
+    LOG.i(f"Running event {event_id}")
+    try:
+        event_id_int = int(event_id)
+    except ValueError:
+        raise ValueError(f"Invalid event id: {event_id}")
+    event_debugger.run_event(event_id_int, delete_on_success)
+
+
 def args():
     parser = argparse.ArgumentParser(description="Run event listener")
-    parser.add_argument(
-        "mode",
-        help="Mode to run",
-        choices=[Mode.DEAD_LETTER.value, Mode.LISTENER.value],
+    subparsers = parser.add_subparsers(dest="command")
+
+    listener_parser = subparsers.add_parser(Mode.LISTENER.value)
+    listener_parser.add_argument(
+        "--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
     )
-    parser.add_argument("--dry-run", help="Dry run mode", action="store_true")
+    listener_parser.add_argument("--dry-run", action="store_true")
+
+    dead_letter_parser = subparsers.add_parser(Mode.DEAD_LETTER.value)
+    dead_letter_parser.add_argument(
+        "--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
+    )
+    dead_letter_parser.add_argument("--dry-run", action="store_true")
+
+    debug_parser = subparsers.add_parser("debug")
+    debug_parser.add_argument("event_id", help="ID of the event to debug")
+
+    run_parser = subparsers.add_parser("run")
+    run_parser.add_argument("event_id", help="ID of the event to run")
+    run_parser.add_argument("--delete-on-success", action="store_true")
+
     return parser.parse_args()


 if __name__ == "__main__":
     if len(argv) < 2:
-        print("Invalid usage. Pass 'listener' or 'dead_letter' as argument")
+        print("Invalid usage. Pass a valid subcommand as argument")
         exit(1)

     args = args()
-    main(Mode.from_str(args.mode), args.dry_run)
+
+    if args.command in [Mode.LISTENER.value, Mode.DEAD_LETTER.value]:
+        main(
+            mode=Mode.from_str(args.command),
+            dry_run=args.dry_run,
+            max_retries=args.max_retries,
+        )
+    elif args.command == "debug":
+        debug_event(args.event_id)
+    elif args.command == "run":
+        run_event(args.event_id, args.delete_on_success)
+    else:
+        print("Invalid command")
+        exit(1)
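Because the single positional mode argument is replaced by subcommands, the parsed namespace changes shape as well. A small self-contained sketch (not the project script itself) that reproduces the listener and debug layouts from the diff and prints what parse_args returns:

    import argparse

    # Rebuild just enough of the subcommand layout above to inspect the result.
    parser = argparse.ArgumentParser(description="Run event listener")
    subparsers = parser.add_subparsers(dest="command")

    listener_parser = subparsers.add_parser("listener")
    listener_parser.add_argument("--max-retries", type=int, default=10)
    listener_parser.add_argument("--dry-run", action="store_true")

    debug_parser = subparsers.add_parser("debug")
    debug_parser.add_argument("event_id")

    print(parser.parse_args(["listener", "--max-retries", "3"]))
    # Namespace(command='listener', dry_run=False, max_retries=3)
    print(parser.parse_args(["debug", "42"]))
    # Namespace(command='debug', event_id='42')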

View File

@@ -0,0 +1,43 @@
+from app.events.generated import event_pb2
+from app.models import SyncEvent
+from events.event_sink import HttpEventSink
+
+
+def debug_event(event_id: int):
+    event = SyncEvent.get_by(id=event_id)
+    if not event:
+        print("Event not found")
+        return
+
+    print(f"Info for event {event_id}")
+    print(f"- Created at: {event.created_at}")
+    print(f"- Updated at: {event.updated_at}")
+    print(f"- Taken time: {event.taken_time}")
+    print(f"- Retry count: {event.retry_count}")
+
+    print()
+    print("Event contents")
+    event_contents = event.content
+    parsed = event_pb2.Event.FromString(event_contents)
+
+    print(f"- UserID: {parsed.user_id}")
+    print(f"- ExternalUserID: {parsed.external_user_id}")
+    print(f"- PartnerID: {parsed.partner_id}")
+
+    content = parsed.content
+    print(f"Content: {content}")
+
+
+def run_event(event_id: int, delete_on_success: bool = True):
+    event = SyncEvent.get_by(id=event_id)
+    if not event:
+        print("Event not found")
+        return
+
+    print(f"Processing event {event_id}")
+    sink = HttpEventSink()
+    res = sink.process(event)
+    if res:
+        print(f"Processed event {event_id}")
+        if delete_on_success:
+            SyncEvent.delete(event_id, commit=True)
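The decode step in debug_event is plain protobuf deserialization. A minimal sketch of the same pattern, where raw stands in for a SyncEvent.content blob fetched elsewhere (the helper name is made up for illustration):

    from app.events.generated import event_pb2

    def summarize_event(raw: bytes) -> str:
        # Decode raw event bytes the same way debug_event does above.
        parsed = event_pb2.Event.FromString(raw)
        return (
            f"user_id={parsed.user_id} "
            f"external_user_id={parsed.external_user_id} "
            f"partner_id={parsed.partner_id}"
        )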

View File

@@ -1,4 +1,5 @@
 import requests
+import newrelic.agent

 from abc import ABC, abstractmethod
 from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
@@ -11,6 +12,10 @@ class EventSink(ABC):
     def process(self, event: SyncEvent) -> bool:
         pass

+    @abstractmethod
+    def send_data_to_webhook(self, data: bytes) -> bool:
+        pass
+

 class HttpEventSink(EventSink):
     def process(self, event: SyncEvent) -> bool:
@@ -20,19 +25,28 @@ class HttpEventSink(EventSink):
         LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")

+        if self.send_data_to_webhook(event.content):
+            LOG.info(f"Event {event.id} sent successfully to webhook")
+            return True
+
+        return False
+
+    def send_data_to_webhook(self, data: bytes) -> bool:
         res = requests.post(
             url=EVENT_WEBHOOK,
-            data=event.content,
+            data=data,
             headers={"Content-Type": "application/x-protobuf"},
             verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
         )
+        newrelic.agent.record_custom_event(
+            "EventSentToPartner", {"http_code": res.status_code}
+        )
         if res.status_code != 200:
             LOG.warning(
                 f"Failed to send event to webhook: {res.status_code} {res.text}"
             )
             return False
         else:
-            LOG.info(f"Event {event.id} sent successfully to webhook")
             return True
@@ -40,3 +54,7 @@ class ConsoleEventSink(EventSink):
     def process(self, event: SyncEvent) -> bool:
         LOG.info(f"Handling event {event.id}")
         return True
+
+    def send_data_to_webhook(self, data: bytes) -> bool:
+        LOG.info(f"Sending {len(data)} bytes to webhook")
+        return True
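With send_data_to_webhook promoted to an abstract method on EventSink, an alternative sink only has to implement the two methods. A hedged sketch of a log-only sink (the class name is invented; the imports follow the module paths used in this diff):

    from app.log import LOG
    from app.models import SyncEvent
    from events.event_sink import EventSink


    class DryRunEventSink(EventSink):
        """Illustrative sink that logs instead of calling the webhook."""

        def process(self, event: SyncEvent) -> bool:
            return self.send_data_to_webhook(event.content)

        def send_data_to_webhook(self, data: bytes) -> bool:
            LOG.info(f"Would have sent {len(data)} bytes to the webhook")
            return True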

View File

@@ -4,6 +4,8 @@ import psycopg2
 import select

 from abc import ABC, abstractmethod
+
+from app.db import Session
 from app.log import LOG
 from app.models import SyncEvent
 from app.events.event_dispatcher import NOTIFICATION_CHANNEL
@@ -44,6 +46,7 @@ class PostgresEventSource(EventSource):
         cursor = self.__connection.cursor()
         cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")

+        LOG.info("Starting to listen to events")
         while True:
             if select.select([self.__connection], [], [], 5) != ([], [], []):
                 self.__connection.poll()
@@ -66,9 +69,12 @@ class PostgresEventSource(EventSource):
                             LOG.info(f"Could not find event with id={notify.payload}")
                     except Exception as e:
                         LOG.warn(f"Error getting event: {e}")
+                    Session.close()  # Ensure we get a new connection and we don't leave a dangling tx

     def __connect(self):
-        self.__connection = psycopg2.connect(self.__connection_string)
+        self.__connection = psycopg2.connect(
+            self.__connection_string, application_name="sl-event-listen"
+        )

         from app.db import Session
@@ -76,23 +82,33 @@ class PostgresEventSource(EventSource):

 class DeadLetterEventSource(EventSource):
+    def __init__(self, max_retries: int):
+        self.__max_retries = max_retries
+
+    def execute_loop(
+        self, on_event: Callable[[SyncEvent], NoReturn]
+    ) -> list[SyncEvent]:
+        threshold = arrow.utcnow().shift(minutes=-_DEAD_LETTER_THRESHOLD_MINUTES)
+        events = SyncEvent.get_dead_letter(
+            older_than=threshold, max_retries=self.__max_retries
+        )
+        if events:
+            LOG.info(f"Got {len(events)} dead letter events")
+            newrelic.agent.record_custom_metric(
+                "Custom/dead_letter_events_to_process", len(events)
+            )
+            for event in events:
+                if event.mark_as_taken(allow_taken_older_than=threshold):
+                    on_event(event)
+        return events
+
     @newrelic.agent.background_task()
     def run(self, on_event: Callable[[SyncEvent], NoReturn]):
         while True:
             try:
-                threshold = arrow.utcnow().shift(
-                    minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
-                )
-                events = SyncEvent.get_dead_letter(older_than=threshold)
-                if events:
-                    LOG.info(f"Got {len(events)} dead letter events")
-                if events:
-                    newrelic.agent.record_custom_metric(
-                        "Custom/dead_letter_events_to_process", len(events)
-                    )
-                    for event in events:
-                        on_event(event)
-                else:
+                events = self.execute_loop(on_event)
+                Session.close()  # Ensure that we have a new connection and we don't have a dangling tx with a lock
+                if not events:
                     LOG.debug("No dead letter events")
                     sleep(_DEAD_LETTER_INTERVAL_SECONDS)
             except Exception as e:
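The PostgresEventSource changes keep the same LISTEN/NOTIFY polling loop, only adding an application_name to the connection and closing the session after each batch. A standalone sketch of that polling pattern, with a placeholder DSN and channel name (the real values come from EVENT_LISTENER_DB_URI and NOTIFICATION_CHANNEL in the code above):

    import select

    import psycopg2

    # Placeholder connection string and channel, for illustration only.
    conn = psycopg2.connect("dbname=example", application_name="sl-event-listen")
    conn.autocommit = True

    cur = conn.cursor()
    cur.execute("LISTEN example_channel;")

    while True:
        # Wait up to 5 seconds for a notification, then drain the queue.
        if select.select([conn], [], [], 5) != ([], [], []):
            conn.poll()
            while conn.notifies:
                notify = conn.notifies.pop(0)
                print(f"received event id {notify.payload}")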

View File

@@ -2,21 +2,26 @@ import arrow
 import newrelic.agent

 from app.log import LOG
+from app.db import Session
 from app.models import SyncEvent
+from app.monitor_utils import send_version_event
 from events.event_sink import EventSink
 from events.event_source import EventSource


 class Runner:
-    def __init__(self, source: EventSource, sink: EventSink):
+    def __init__(self, source: EventSource, sink: EventSink, service_name: str = ""):
         self.__source = source
         self.__sink = sink
+        self.__service_name = service_name

     def run(self):
         self.__source.run(self.__on_event)

     @newrelic.agent.background_task()
     def __on_event(self, event: SyncEvent):
+        if self.__service_name:
+            send_version_event(self.__service_name)
         try:
             event_created_at = event.created_at
             start_time = arrow.now()
@@ -37,6 +42,9 @@ class Runner:
                     "Custom/sync_event_elapsed_time",
                     time_between_taken_and_created.total_seconds(),
                 )
+            else:
+                event.retry_count = event.retry_count + 1
+                Session.commit()
         except Exception as e:
             LOG.warn(f"Exception processing event [id={event.id}]: {e}")
             newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)

View File

@@ -19,7 +19,7 @@ URL=http://localhost:7777
 NOT_SEND_EMAIL=true

 # domain used to create alias
-EMAIL_DOMAIN=sl.local
+EMAIL_DOMAIN=sl.lan

 # Allow SimpleLogin to enforce SPF by using the extra headers from postfix
 # ENFORCE_SPF=true
@@ -37,18 +37,18 @@ EMAIL_DOMAIN=sl.local
 # FIRST_ALIAS_DOMAIN = another-domain.com

 # transactional email is sent from this email address
-SUPPORT_EMAIL=support@sl.local
+SUPPORT_EMAIL=support@sl.lan
 SUPPORT_NAME=Son from SimpleLogin

 # To use VERP
 # prefix must end with + and suffix must start with +
 # BOUNCE_PREFIX = "bounces+"
-# BOUNCE_SUFFIX = "+@sl.local"
+# BOUNCE_SUFFIX = "+@sl.lan"

 # same as BOUNCE_PREFIX but used for reply phase. Note it doesn't have the plus sign (+) at the end.
 # BOUNCE_PREFIX_FOR_REPLY_PHASE = "bounce_reply"

 # to receive general stats.
-# ADMIN_EMAIL=admin@sl.local
+# ADMIN_EMAIL=admin@sl.lan

 # Max number emails user can generate for free plan
 # Set to 5 by default

Some files were not shown because too many files have changed in this diff.