commit 20da343c54 (parent 02776e8478)
MrMeeb, 2022-12-30 16:23:27 +00:00
1304 changed files with 870224 additions and 0 deletions

17
app/.dockerignore Normal file

@ -0,0 +1,17 @@
.idea/
*.pyc
db.sqlite
.env
.pytest_cache
.vscode
.DS_Store
config
adhoc
static/node_modules
db.sqlite-journal
static/upload
venv/
.venv
.coverage
htmlcov
.git/

26
app/.flake8 Normal file

@ -0,0 +1,26 @@
[flake8]
max-line-length = 88
select = C,E,F,W,B,B902,B903,B904,B950
extend-ignore =
# For black compatibility
E203,
E501,
# Ignore "f-string is missing placeholders"
F541,
# allow bare except
E722, B001
exclude =
.git,
__pycache__,
.pytest_cache,
.venv,
static,
templates,
# migrations are generated by alembic
migrations,
docs,
shell.py
per-file-ignores =
# ignore unused imports in __init__
__init__.py:F401
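
An illustrative aside, not part of the committed file: `F401` is ignored in `__init__.py` because packages commonly re-export names there, which flake8 would otherwise flag as "imported but unused". The package and module names below are made up:
```python
# __init__.py of a hypothetical package, for illustration only.
# `bp` is imported purely to re-export it (so callers can write
# `from some_package import bp`); without the per-file ignore above,
# flake8 would report F401 ("imported but unused") here.
from .views import bp

__all__ = ["bp"]
```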

3
app/.gitattributes vendored Normal file

@ -0,0 +1,3 @@
# https://github.com/github/linguist#overrides
static/* linguist-vendored
docs/* linguist-documentation

2
app/.github/CODEOWNERS vendored Normal file

@ -0,0 +1,2 @@
## Code changes will send PRs to the following users
* @acasajus @cquintana92 @nguyenkims

1
app/.github/FUNDING.yml vendored Normal file

@ -0,0 +1 @@
open_collective: simplelogin

39
app/.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@ -0,0 +1,39 @@
---
name: Bug report
about: Create a report to help us improve SimpleLogin.
title: ''
labels: ''
assignees: ''
---
Please note that this template is only for bug reports.
For help with your account, please reach out to us at hi[at]simplelogin.io. Please make sure to check out [our FAQ](https://simplelogin.io/faq/), which contains frequently asked questions.
For feature requests, you can use our [forum](https://github.com/simple-login/app/discussions/categories/feature-request).
For self-hosted questions/issues, please ask in the [self-hosted forum](https://github.com/simple-login/app/discussions/categories/self-hosting-question).
## Prerequisites
- [ ] I have searched open and closed issues to make sure that the bug has not yet been reported.
## Bug report
**Describe the bug**
A clear and concise description of what the bug is.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Environment (If applicable):**
- OS: Linux, Mac, Windows
- Browser: Firefox, Chrome, Brave, Safari
- Version [e.g. 78]
**Additional context**
Add any other context about the problem here.

23
app/.github/changelog_configuration.json vendored Normal file

@ -0,0 +1,23 @@
{
"template": "${{CHANGELOG}}\n\n<details>\n<summary>Uncategorized</summary>\n\n${{UNCATEGORIZED}}\n</details>",
"pr_template": "- ${{TITLE}} #${{NUMBER}}",
"empty_template": "- no changes",
"categories": [
{
"title": "## 🚀 Features",
"labels": ["feature"]
},
{
"title": "## 🐛 Fixes",
"labels": ["fix", "bug"]
},
{
"title": "## 🔧 Enhancements",
"labels": ["enhancement"]
}
],
"ignore_labels": ["ignore"],
"tag_resolver": {
"method": "semver"
}
}

232
app/.github/workflows/main.yml vendored Normal file

@ -0,0 +1,232 @@
name: Test and lint
on:
push:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Check out repo
uses: actions/checkout@v3
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v4
with:
python-version: '3.9'
cache: 'poetry'
- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: poetry install --no-interaction
- name: Check formatting & linting
run: |
poetry run pre-commit run --all-files
test:
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
python-version: ["3.10"]
# service containers to run with the `test` job
services:
# label used to access the service container
postgres:
# Docker Hub image
image: postgres:13
# service environment variables
# `POSTGRES_HOST` is `postgres`
env:
# optional (defaults to `postgres`)
POSTGRES_DB: test
# required
POSTGRES_PASSWORD: test
# optional (defaults to `5432`)
POSTGRES_PORT: 5432
# optional (defaults to `postgres`)
POSTGRES_USER: test
ports:
- 15432:5432
# set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Check out repo
uses: actions/checkout@v3
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
- name: Install OS dependencies
if: matrix.python-version == '3.10'
run: |
sudo apt update
sudo apt install -y libre2-dev libpq-dev
- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: poetry install --no-interaction
- name: Start Redis v6
uses: superchargejs/redis-github-action@1.1.0
with:
redis-version: 6
- name: Run db migration
run: |
CONFIG=tests/test.env poetry run alembic upgrade head
- name: Prepare version file
run: |
scripts/generate-build-info.sh ${{ github.sha }}
cat app/build_info.py
- name: Test with pytest
run: |
poetry run pytest
env:
GITHUB_ACTIONS_TEST: true
- name: Archive code coverage results
uses: actions/upload-artifact@v2
with:
name: code-coverage-report
path: htmlcov
build:
runs-on: ubuntu-latest
needs: ['test', 'lint']
if: github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v'))
steps:
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: simplelogin/app-ci
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
# We need to checkout the repository in order for the "Create Sentry release" to work
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Create Sentry release
uses: getsentry/action-release@v1
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
ignore_missing: true
ignore_empty: true
- name: Prepare version file
run: |
scripts/generate-build-info.sh ${{ github.sha }}
cat app/build_info.py
- name: Build image and publish to Docker Registry
uses: docker/build-push-action@v3
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
#- name: Send Telegram message
# uses: appleboy/telegram-action@master
# with:
# to: ${{ secrets.TELEGRAM_TO }}
# token: ${{ secrets.TELEGRAM_TOKEN }}
# args: Docker image pushed on ${{ github.ref }}
# If we have generated a tag, generate the changelog, send a notification to slack and create the GitHub release
- name: Build Changelog
id: build_changelog
if: startsWith(github.ref, 'refs/tags/v')
uses: mikepenz/release-changelog-builder-action@v3
with:
configuration: ".github/changelog_configuration.json"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Prepare Slack notification contents
if: startsWith(github.ref, 'refs/tags/v')
run: |
changelog=$(cat << EOH
${{ steps.build_changelog.outputs.changelog }}
EOH
)
messageWithoutNewlines=$(echo "${changelog}" | awk '{printf "%s\\n", $0}')
messageWithoutDoubleQuotes=$(echo "${messageWithoutNewlines}" | sed "s/\"/'/g")
echo "${messageWithoutDoubleQuotes}"
echo "SLACK_CHANGELOG=${messageWithoutDoubleQuotes}" >> $GITHUB_ENV
- name: Post notification to Slack
uses: slackapi/slack-github-action@v1.19.0
if: startsWith(github.ref, 'refs/tags/v')
with:
channel-id: ${{ secrets.SLACK_CHANNEL_ID }}
payload: |
{
"blocks": [
{
"type": "header",
"text": {
"type": "plain_text",
"text": "New tag created",
"emoji": true
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*Tag: ${{ github.ref_name }}* (${{ github.sha }})"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*Changelog:*\n${{ env.SLACK_CHANGELOG }}"
}
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
- name: Create GitHub Release
if: startsWith(github.ref, 'refs/tags/v')
uses: actions/create-release@v1
with:
tag_name: ${{ github.ref }}
release_name: ${{ github.ref }}
body: ${{ steps.build_changelog.outputs.changelog }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
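
A note on the Postgres service container defined in the `test` job above: it is published on host port 15432, so code running on the runner can reach it at `localhost:15432` with the credentials from the service definition. A minimal connectivity sketch (assuming `psycopg2` is installed; this script is not part of the repository):
```python
# Sketch: connect to the CI Postgres service from the runner.
# Host, port and credentials come from the service definition above.
import psycopg2

conn = psycopg2.connect(
    host="localhost",
    port=15432,
    user="test",
    password="test",
    dbname="test",
)
with conn, conn.cursor() as cur:
    cur.execute("SELECT 1")
    print(cur.fetchone())  # (1,)
conn.close()
```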

17
app/.gitignore vendored Normal file

@ -0,0 +1,17 @@
.idea/
*.pyc
db.sqlite
.env
.pytest_cache
.vscode
.DS_Store
config
static/node_modules
db.sqlite-journal
static/upload
venv/
.venv
.python-version
.coverage
htmlcov
adhoc

3
app/.jshintrc Normal file

@ -0,0 +1,3 @@
{
"esversion": 8
}

23
app/.pre-commit-config.yaml Normal file

@ -0,0 +1,23 @@
exclude: "(migrations|static/node_modules|static/assets|static/vendor)"
default_language_version:
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
hooks:
- id: check-yaml
- id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.3.0
hooks:
- id: djlint-jinja
files: '.*\.html'
entry: djlint --reformat

227
app/.pylintrc Normal file

@ -0,0 +1,227 @@
[MASTER]
extension-pkg-allow-list=re2
fail-under=7.0
ignore=CVS
ignore-paths=migrations
ignore-patterns=^\.#
jobs=0
[MESSAGES CONTROL]
disable=missing-function-docstring,
missing-module-docstring,
duplicate-code,
#import-error,
missing-class-docstring,
useless-object-inheritance,
use-dict-literal,
logging-format-interpolation,
consider-using-f-string,
unnecessary-comprehension,
inconsistent-return-statements,
wrong-import-order,
line-too-long,
invalid-name,
global-statement,
no-else-return,
unspecified-encoding,
logging-fstring-interpolation,
too-few-public-methods,
bare-except,
fixme,
unnecessary-pass,
f-string-without-interpolation,
super-init-not-called,
unused-argument,
ungrouped-imports,
too-many-locals,
consider-using-with,
too-many-statements,
consider-using-set-comprehension,
unidiomatic-typecheck,
useless-else-on-loop,
too-many-return-statements,
broad-except,
protected-access,
consider-using-enumerate,
too-many-nested-blocks,
too-many-branches,
simplifiable-if-expression,
possibly-unused-variable,
pointless-string-statement,
wrong-import-position,
redefined-outer-name,
raise-missing-from,
logging-too-few-args,
redefined-builtin,
too-many-arguments,
import-outside-toplevel,
redefined-argument-from-local,
logging-too-many-args,
too-many-instance-attributes,
unreachable,
no-name-in-module,
no-member,
consider-using-ternary,
too-many-lines,
arguments-differ,
too-many-public-methods,
unused-variable,
consider-using-dict-items,
consider-using-in,
reimported,
too-many-boolean-expressions,
cyclic-import,
not-callable, # (paddle_utils.py) verifier.verify cannot be called (although it can)
abstract-method, # (models.py)
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[FORMAT]
max-line-length=88
single-line-if-stmt=yes
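
For illustration only (not part of the committed file), a snippet that satisfies the naming styles configured above, including the single-letter `good-names` exception:
```python
# Hypothetical module written to match the configured pylint naming styles.
MAX_RETRIES = 3  # const-naming-style=UPPER_CASE


class EmailForwarder:  # class-naming-style=PascalCase
    DEFAULT_TIMEOUT = 10  # class-const-naming-style=UPPER_CASE

    def forward_message(self, message_id):  # method/argument: snake_case
        retry_count = 0  # variable-naming-style=snake_case
        for i in range(MAX_RETRIES):  # `i` is accepted via good-names
            retry_count += i
        return message_id, retry_count
```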

1
app/.version Normal file

@ -0,0 +1 @@
dev

127
app/CHANGELOG Normal file

@ -0,0 +1,127 @@
# Changelog
All notable changes to SimpleLogin will be documented in this file.
The version corresponds to SimpleLogin Docker `image tag`.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [3.4.0] - 2021-04-06
Support ARM arch
Remove unused config like DEBUG, CLOUDWATCH, DKIM_PUBLIC_KEY_PATH, DKIM_DNS_VALUE
Handle auto responder email
Inform user when their alias has been transferred to another user
Use alias transfer_token
Improve logging
Add /api/export/data, /api/export/aliases endpoints
Take into account mailbox when importing/exporting aliases
Multiple bug fixes
Code refactoring
Add ENABLE_SPAM_ASSASSIN config
## [3.3.0] - 2021-03-05
Notify user when reply cannot be sent
User can choose default domain for random alias
Enable LOCAL_FILE_UPLOAD by default
Fix: user has to log in again after quitting the browser
Log the user in on API auth endpoints
Create POST /api/api_key
Add GET /api/logout
Add setup-done page
Add PublicDomain
User can choose a random alias domain in a list of public domains
User can choose mailboxes for a domain
Return support_pgp in GET /api/v2/aliases
Self hosting improvements
Improve Search
Use poetry instead of pip
Add PATCH /api/user_info
Add GET /api/setting
Add GET /api/setting/domains
Add PATCH /api/setting
Add "Generic Subject" option
Add /v2/setting/domains
Add /api/v5/alias/options
Add GET /api/custom_domains
Add GET /api/custom_domains/:custom_domain_id/trash
Able to disable a directory
Use VERP: send email from bounce address
Use VERP for transactional email: remove SENDER, SENDER_DIR
Use "John Wick - john at wick.com" as default sender format
Able to transfer an alias
## [3.2.2] - 2020-06-15
Fix POST /v2/alias/custom/new when DISABLE_ALIAS_SUFFIX is set
## [3.2.1] - 2020-06-15
Fix regressions introduced in 3.2.0 regarding DISABLE_ALIAS_SUFFIX option
## [3.2.0] - 2020-06-10
Make FIDO available
Fix "remove the reverse-alias" when replying
Update GET /mailboxes
Create POST /api/v3/alias/custom/new
Add PGP for contact
## [3.1.1] - 2020-05-27
Fix alias creation
## [3.1.0] - 2020-05-09
Remove social login signup
More simple UI with advanced options hidden by default
Use pagination for alias page
Use Ajax for alias note and mailbox update
Alias can have a name
Global stats
DMARC support for custom domain
Enforce SPF
FIDO support (beta)
Able to disable onboarding emails
## [3.0.1] - 2020-04-13
Fix compatibility with 2x version
Fix "Content-Transfer-Encoding" issue https://github.com/simple-login/app/issues/125
## [3.0.0] - 2020-04-13
New endpoints to create/update aliases:
PUT /api/aliases/:alias_id
GET /api/aliases/:alias_id/contacts
POST /api/aliases/:alias_id/contacts
GET /api/v2/aliases
(Optional) Spam detection by Spamassassin
Handling for bounced emails
Support Multiple recipients (in To and Cc headers)
## [2.1.0] - 2020-03-23
Support PGP
## [2.0.0] - 2020-03-13
Support multiple Mailboxes
Take into account Sender header
## [1.0.5] - 2020-02-24
Improve email forwarding.
Minor improvements on monitoring.
## [1.0.4] - 2020-02-09
Fix duplicate "List-Unsubscribe" email header.
## [1.0.3] - 2020-01-28
Add DISABLE_REGISTRATION param to disable new registrations.
## [1.0.2] - 2020-01-28
Add SUPPORT_NAME param to set a support email name.
## [1.0.1] - 2020-01-28
Simplify config file.
## [1.0.0] - 2020-01-22
Start tagging docker image.
Docker image tag is used in README to make sure SimpleLogin new Docker images don't break previous deployments.

216
app/CONTRIBUTING.md Normal file

@ -0,0 +1,216 @@
Thanks for taking the time to contribute! 🎉👍
Before working on a new feature, please get in touch with us at dev[at]simplelogin.io to avoid duplication.
We can also discuss the best way to implement it.
The project uses Flask and Python 3.7+, and requires Postgres 12+ as a dependency.
## General Architecture
<p align="center">
<img src="./docs/archi.png" height="450px">
</p>
SimpleLogin backend consists of 2 main components:
- the `webapp` used by several clients: the web app, the browser extensions (Chrome & Firefox for now), OAuth clients (apps that integrate "Sign in with SimpleLogin" button) and mobile apps.
- the `email handler`: implements the email forwarding (i.e. alias receiving email) and email sending (i.e. alias sending email).
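As a rough mental model of these two components only (a hedged sketch, not SimpleLogin's actual code: the route, handler class, and use of `aiosmtpd` are assumptions for illustration; the ports match the ones used elsewhere in this guide):
```python
# Sketch: a Flask app for the web/API clients and an SMTP-facing handler
# that receives mail sent to aliases. Illustrative only.
from aiosmtpd.controller import Controller
from flask import Flask

webapp = Flask(__name__)


@webapp.route("/api/user_info")
def user_info():
    # The real webapp authenticates the caller and returns account data.
    return {"name": "john"}


class ForwardingHandler:
    async def handle_DATA(self, server, session, envelope):
        # The real email handler rewrites the message and forwards it
        # to the mailbox(es) behind the alias.
        print("mail from", envelope.mail_from, "to", envelope.rcpt_tos)
        return "250 Message accepted for delivery"


if __name__ == "__main__":
    Controller(ForwardingHandler(), hostname="127.0.0.1", port=20381).start()
    webapp.run(port=7777)
```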
## Install dependencies
The project requires:
- Python 3.7+ and [poetry](https://python-poetry.org/) to manage dependencies
- Node v10 for the front-end
- Postgres 12+
First, install all dependencies by running the following command.
Feel free to use `virtualenv` or similar tools to isolate the development environment.
```bash
poetry install
```
On Mac, sometimes you might need to install some other packages via `brew`:
```bash
brew install pkg-config libffi openssl postgresql
```
You also need to install `gpg` tool, on Mac it can be done with:
```bash
brew install gnupg
```
If you see the `pyre2` package in the error message, you might need to install its dependencies with `brew`.
More info on https://github.com/andreasvc/pyre2
```bash
brew install -s re2 pybind11
```
## Linting and static analysis
We use pre-commit to run all our linting and static analysis checks. To install it in your development environment, please run
```bash
poetry run pre-commit install
```
## Run tests
For most tests, you will need to have ``redis`` installed and started on your machine (listening on port 6379).
```bash
sh scripts/run-test.sh
```
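Before running the tests, you can quickly check that Redis is reachable with a throwaway sketch (assuming the `redis` Python package is installed; not part of the repository):
```python
# Sketch: verify the local Redis required by the tests is up.
import redis

assert redis.Redis(host="localhost", port=6379).ping()
```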
## Run the code locally
Install npm packages
```bash
cd static && npm install
```
To run the code locally, please create a local setting file based on `example.env`:
```
cp example.env .env
```
You need to edit your `.env` so that `DB_URI` matches the host port Postgres is exposed on:
```
DB_URI=postgresql://myuser:mypassword@localhost:35432/simplelogin
```
Run the postgres database, publishing the same host port:
```bash
docker run -e POSTGRES_PASSWORD=mypassword -e POSTGRES_USER=myuser -e POSTGRES_DB=simplelogin -p 35432:5432 postgres:13
```
To run the server:
```
alembic upgrade head && flask dummy-data && python3 server.py
```
Then open http://localhost:7777; you should be able to log in with the `john@wick.com / password` account.
You might need to change the `.env` file for developing certain features. This file is ignored by git.
## Database migration
The database migration is handled by `alembic`
Whenever the model changes, a new migration has to be created.
If you have Docker installed, you can create the migration by running the following script:
```bash
sh scripts/new-migration.sh
```
Make sure to review the migration script before committing it.
Sometimes (very rarely though), the automatically generated script can be incorrect.
We cannot use the local database to generate the migration script, as the local database doesn't use migrations:
it is created via `db.create_all()` (cf the `fake_data()` method). This is convenient for development and
unit tests as we don't have to wait for migrations to run.
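For reference, an auto-generated Alembic script produced this way typically looks like the following; the revision ids, table, and column names below are made up for illustration:
```python
"""add note column to alias (hypothetical example)

Revision ID: abc123def456
Revises: 0123456789ab
"""
import sqlalchemy as sa
from alembic import op

# Revision identifiers used by Alembic; these values are placeholders.
revision = "abc123def456"
down_revision = "0123456789ab"
branch_labels = None
depends_on = None


def upgrade():
    # The autogenerated diff between the models and the current schema.
    op.add_column("alias", sa.Column("note", sa.Text(), nullable=True))


def downgrade():
    op.drop_column("alias", "note")
```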
## Reset database
There are two scripts to reset your local db to an empty state:
- `scripts/reset_local_db.sh` will reset your development db to the latest migration version and add the development data needed to run
`server.py` locally.
- `scripts/reset_test_db.sh` will reset your test db to the latest migration without adding the dev server data, to prevent it from interfering with
the tests.
## Code structure
The repo consists of the three following entry points:
- wsgi.py and server.py: the webapp.
- email_handler.py: the email handler.
- cron.py: the cronjob.
Here is a quick summary of the directories and their roles:
- app/: main Flask app. It is structured into different packages representing different features like oauth, api, dashboard, etc.
- local_data/: contains files to facilitate the local development. They are replaced during the deployment.
- migrations/: generated by flask-migrate. These files should only be edited when you spot (very rare) errors in the generated database migration files.
- static/: files available at `/static` url.
- templates/: contains both html and email templates.
- tests/: tests. We don't really distinguish unit, functional or integration tests. A test is simply here to make sure a feature works correctly.
## Pull request
The code is formatted using https://github.com/psf/black; to format the code, simply run
```
poetry run black .
```
The code is also checked with `flake8`; make sure to run `flake8` before creating the pull request:
```bash
poetry run flake8
```
For HTML templates, we use `djlint`. Before creating a pull request, please run
```bash
poetry run djlint --check templates
```
## Test sending email
[swaks](http://www.jetmore.org/john/code/swaks/) is used for sending test emails to the `email_handler`.
[mailcatcher](https://github.com/sj26/mailcatcher) or [MailHog](https://github.com/mailhog/MailHog) can be used as a MTA to receive emails.
Here's how to set up the email handler:
1) run mailcatcher or MailHog
```bash
mailcatcher
```
2) Make sure to set the following variables in the `.env` file
```
# comment out this variable
# NOT_SEND_EMAIL=true
# So the emails will be sent to mailcatcher/MailHog
POSTFIX_SERVER=localhost
POSTFIX_PORT=1025
```
3) Run email_handler
```bash
python email_handler.py
```
4) Send a test email
```bash
swaks --to e1@sl.local --from hey@google.com --server 127.0.0.1:20381
```
Now open http://localhost:1080/ (or http://localhost:8025/ for MailHog); you should see the forwarded email.
## Job runner
Some features require a job handler (such as GDPR data export). To test such features, you need to run the job runner:
```bash
python job_runner.py
```

47
app/Dockerfile Normal file

@ -0,0 +1,47 @@
# Install npm packages
FROM node:10.17.0-alpine AS npm
WORKDIR /code
COPY ./static/package*.json /code/static/
RUN cd /code/static && npm install
# Main image
FROM python:3.10
# Keeps Python from generating .pyc files in the container
ENV PYTHONDONTWRITEBYTECODE 1
# Turns off buffering for easier container logging
ENV PYTHONUNBUFFERED 1
# Add poetry to PATH
ENV PATH="${PATH}:/root/.local/bin"
WORKDIR /code
# Copy poetry files
COPY poetry.lock pyproject.toml ./
# Install and setup poetry
RUN pip install -U pip \
&& apt-get update \
&& apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
&& curl -sSL https://install.python-poetry.org | python3 - \
# Remove curl and netcat from the image
&& apt-get purge -y curl netcat \
# Run poetry
&& poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi --no-root \
# Clear apt cache \
&& apt-get purge -y libre2-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# copy npm packages
COPY --from=npm /code /code
# copy everything else into /code
COPY . .
EXPOSE 7777
#gunicorn wsgi:app -b 0.0.0.0:7777 -w 2 --timeout 15 --log-level DEBUG
CMD ["gunicorn","wsgi:app","-b","0.0.0.0:7777","-w","2","--timeout","15"]

661
app/LICENSE Normal file

@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
575
app/README.md Normal file
View File
@ -0,0 +1,575 @@
[SimpleLogin](https://simplelogin.io) | Protect your online identity with email aliases
---
<p>
<a href="https://chrome.google.com/webstore/detail/dphilobhebphkdjbpfohgikllaljmgbn">
<img src="https://img.shields.io/chrome-web-store/rating/dphilobhebphkdjbpfohgikllaljmgbn?label=Chrome%20Extension">
</a>
<a href="https://addons.mozilla.org/firefox/addon/simplelogin/">
<img src="https://img.shields.io/amo/rating/simplelogin?label=Firefox%20Add-On&logo=SimpleLogin">
</a>
<a href="./LICENSE">
<img src="https://img.shields.io/github/license/simple-login/app">
</a>
<a href="https://twitter.com/simplelogin">
<img src="https://img.shields.io/twitter/follow/simplelogin?style=social">
</a>
</p>
<p align="center">
<a href="https://simplelogin.io">
<img src="./docs/hero.png" height="600px">
</a>
</p>
---
Your email address is your **online identity**. When you use the same email address everywhere, you can be easily tracked.
More information on https://simplelogin.io
This README contains instructions on how to self-host SimpleLogin.
Once you have your own SimpleLogin instance running, you can change the `API URL` in SimpleLogin's Chrome/Firefox extension, Android/iOS app to your server.
The SimpleLogin roadmap is at https://github.com/simple-login/app/projects/1 and our forum is at https://github.com/simple-login/app/discussions; feel free to submit new ideas or vote on features.
### Prerequisites
- a Linux server (either a VM or a dedicated server). This doc shows the setup for Ubuntu 18.04 LTS, but the steps can be adapted for other popular Linux distributions. As most of the components run as Docker containers and Docker can be a bit heavy, having at least 2 GB of RAM is recommended. The server needs to have ports 25 (email), 80 and 443 (for the webapp), and 22 (so you can ssh into it) open.
- a domain whose DNS you can configure. It could be a sub-domain. In the rest of the doc, let's say it's `mydomain.com` for the email and `app.mydomain.com` for the SimpleLogin webapp. Please make sure to replace these values with your domain name whenever they appear in the doc. A trick we use is to download this README file to your computer and replace all `mydomain.com` occurrences with your domain.
Except for the DNS setup, which is usually done in your domain registrar's interface, all the steps below are to be done on your server. The commands are meant to be run with `bash` (or any bash-compatible shell like `zsh`). If you use another shell like `fish`, please make sure to adapt the commands.
### Some utility packages
These packages are used to verify the setup. Install them with:
```bash
sudo apt update && sudo apt install -y dnsutils
```
Create a directory to store SimpleLogin data:
```bash
mkdir sl
mkdir sl/pgp # to store PGP key
mkdir sl/db # to store database
mkdir sl/upload # to store quarantined emails
```
### DKIM
From Wikipedia https://en.wikipedia.org/wiki/DomainKeys_Identified_Mail
> DomainKeys Identified Mail (DKIM) is an email authentication method designed to detect forged sender addresses in emails (email spoofing), a technique often used in phishing and email spam.
Setting up DKIM is highly recommended to reduce the chance of your emails ending up in the recipient's Spam folder.
First, you need to generate a private and a public key for DKIM:
```bash
openssl genrsa -out dkim.key 1024
openssl rsa -in dkim.key -pubout -out dkim.pub.key
```
You will need the files `dkim.key` and `dkim.pub.key` for the next steps.
For email gurus: we have chosen a 1024-bit key length instead of 2048 for DNS simplicity, as some registrars don't play well with long TXT records.
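If your DNS provider handles long TXT records well, you could generate a 2048-bit key instead; this is optional, and the rest of the guide works the same way with the longer key:
```bash
# optional: a stronger DKIM key, assuming your DNS provider accepts long TXT records
openssl genrsa -out dkim.key 2048
openssl rsa -in dkim.key -pubout -out dkim.pub.key
```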
### DNS
Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our tests). In the DNS setup, we usually write domains with a trailing dot (`.`) at the end to force using the absolute domain.
#### MX record
Create an **MX record** that points `mydomain.com.` to `app.mydomain.com.` with priority 10.
To verify that the record is in place, the following command
```bash
dig @1.1.1.1 mydomain.com mx
```
should return:
```
mydomain.com. 3600 IN MX 10 app.mydomain.com.
```
#### A record
Create an **A record** that points `app.mydomain.com.` to your server IP.
If you are using Cloudflare, we recommend disabling the "Proxy" option.
To verify, the following command
```bash
dig @1.1.1.1 app.mydomain.com a
```
should return your server IP.
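If you are not sure what your server's public IP is, one common way to check from the server itself (using a third-party service) is:
```bash
curl -s https://api.ipify.org
```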
#### DKIM
Set up DKIM by adding a TXT record for `dkim._domainkey.mydomain.com.` with the following value:
```
v=DKIM1; k=rsa; p=PUBLIC_KEY
```
where `PUBLIC_KEY` is the content of your `dkim.pub.key`, with two modifications:
- remove the `-----BEGIN PUBLIC KEY-----` and `-----END PUBLIC KEY-----` lines,
- join all the remaining lines into a single line.
For example, if your `dkim.pub.key` is
```
-----BEGIN PUBLIC KEY-----
ab
cd
ef
gh
-----END PUBLIC KEY-----
```
then the `PUBLIC_KEY` would be `abcdefgh`.
You can get the `PUBLIC_KEY` by running this command:
```bash
sed "s/-----BEGIN PUBLIC KEY-----/v=DKIM1; k=rsa; p=/g" $(pwd)/dkim.pub.key | sed 's/-----END PUBLIC KEY-----//g' |tr -d '\n' | awk 1
```
To verify, the following command
```bash
dig @1.1.1.1 dkim._domainkey.mydomain.com txt
```
should return the above value.
#### SPF
From Wikipedia https://en.wikipedia.org/wiki/Sender_Policy_Framework
> Sender Policy Framework (SPF) is an email authentication method designed to detect forging sender addresses during the delivery of the email
Similar to DKIM, setting up SPF is highly recommended.
Add a TXT record for `mydomain.com.` with the value:
```
v=spf1 mx ~all
```
This means that only your server can send emails with the `@mydomain.com` domain.
To verify, the following command
```bash
dig @1.1.1.1 mydomain.com txt
```
should return the above value.
#### DMARC
From Wikipedia https://en.wikipedia.org/wiki/DMARC
> It (DMARC) is designed to give email domain owners the ability to protect their domain from unauthorized use, commonly known as email spoofing
Setting up DMARC is also recommended.
Add a TXT record for `_dmarc.mydomain.com.` with the following value
```
v=DMARC1; p=quarantine; adkim=r; aspf=r
```
This is a `relaxed` DMARC policy. You can also use a stricter policy with the value `v=DMARC1; p=reject; adkim=s; aspf=s`.
To verify, the following command
```bash
dig @1.1.1.1 _dmarc.mydomain.com txt
```
should return the set value.
For more information on DMARC, please consult https://tools.ietf.org/html/rfc7489
### Docker
Now that the boring DNS stuff is done, let's do something more fun!
If you don't already have Docker installed on your server, please follow the steps on [Docker CE for Ubuntu](https://docs.docker.com/v17.12/install/linux/docker-ce/ubuntu/) to install Docker.
You can also install Docker using the [docker-install](https://github.com/docker/docker-install) script:
```bash
curl -fsSL https://get.docker.com | sh
```
### Prepare the Docker network
This Docker network will be used by the other Docker containers started in the next steps.
Later, we will set up Postfix to authorize this network.
```bash
sudo docker network create -d bridge \
--subnet=10.0.0.0/24 \
--gateway=10.0.0.1 \
sl-network
```
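To double-check that the network was created with the expected subnet, you can inspect it (optional):
```bash
sudo docker network inspect sl-network
```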
### Postgres
This section creates a Postgres database using Docker.
If you already have a Postgres database in use, you can skip this section and just copy the database configuration (i.e. host, port, username, password, database name) to use in the next sections.
Run a Postgres Docker container as your Postgres database server. Make sure to replace `myuser` and `mypassword` with something more secret.
```bash
docker run -d \
--name sl-db \
-e POSTGRES_PASSWORD=mypassword \
-e POSTGRES_USER=myuser \
-e POSTGRES_DB=simplelogin \
-p 127.0.0.1:5432:5432 \
-v $(pwd)/sl/db:/var/lib/postgresql/data \
--restart always \
--network="sl-network" \
postgres:12.1
```
To test whether the database is operating correctly, run the following command:
```bash
docker exec -it sl-db psql -U myuser simplelogin
```
You should be logged into the Postgres console. Type `exit` to leave it.
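If you prefer a one-shot check without entering the console, a command along these lines (using the same credentials as above) should print a single row:
```bash
docker exec -it sl-db psql -U myuser simplelogin -c "SELECT 1;"
```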
### Postfix
Install `postfix` and `postfix-pgsql`. The latter is used to connect Postfix to the Postgres database in the next steps.
```bash
sudo apt-get install -y postfix postfix-pgsql
```
Choose "Internet Site" in Postfix installation window then keep using the proposed value as *System mail name* in the next window.
![](./docs/postfix-installation.png)
![](./docs/postfix-installation2.png)
Replace `/etc/postfix/main.cf` with the following content. Make sure to replace `mydomain.com` with your domain.
```
# POSTFIX config file, adapted for SimpleLogin
smtpd_banner = $myhostname ESMTP $mail_name (Ubuntu)
biff = no
# appending .domain is the MUA's job.
append_dot_mydomain = no
# Uncomment the next line to generate "delayed mail" warnings
#delay_warning_time = 4h
readme_directory = no
# See http://www.postfix.org/COMPATIBILITY_README.html -- default to 2 on
# fresh installs.
compatibility_level = 2
# TLS parameters
smtpd_tls_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem
smtpd_tls_key_file=/etc/ssl/private/ssl-cert-snakeoil.key
smtpd_tls_session_cache_database = btree:${data_directory}/smtpd_scache
smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
smtp_tls_security_level = may
smtpd_tls_security_level = may
# See /usr/share/doc/postfix/TLS_README.gz in the postfix-doc package for
# information on enabling SSL in the smtp client.
alias_maps = hash:/etc/aliases
mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128 10.0.0.0/24
# Set your domain here
mydestination =
myhostname = app.mydomain.com
mydomain = mydomain.com
myorigin = mydomain.com
relay_domains = pgsql:/etc/postfix/pgsql-relay-domains.cf
transport_maps = pgsql:/etc/postfix/pgsql-transport-maps.cf
# HELO restrictions
smtpd_delay_reject = yes
smtpd_helo_required = yes
smtpd_helo_restrictions =
permit_mynetworks,
reject_non_fqdn_helo_hostname,
reject_invalid_helo_hostname,
permit
# Sender restrictions:
smtpd_sender_restrictions =
permit_mynetworks,
reject_non_fqdn_sender,
reject_unknown_sender_domain,
permit
# Recipient restrictions:
smtpd_recipient_restrictions =
reject_unauth_pipelining,
reject_non_fqdn_recipient,
reject_unknown_recipient_domain,
permit_mynetworks,
reject_unauth_destination,
reject_rbl_client zen.spamhaus.org,
reject_rbl_client bl.spamcop.net,
permit
```
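You can optionally print a few of the values back to make sure Postfix picks up the new file (`postconf` reads `main.cf` directly):
```bash
sudo postconf myhostname mydomain mynetworks
```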
Create the `/etc/postfix/pgsql-relay-domains.cf` file with the following content.
Make sure that the database config is correctly set: replace `mydomain.com` with your domain and update `myuser` and `mypassword` with your Postgres credentials.
```
# postgres config
hosts = localhost
user = myuser
password = mypassword
dbname = simplelogin
query = SELECT domain FROM custom_domain WHERE domain='%s' AND verified=true
UNION SELECT '%s' WHERE '%s' = 'mydomain.com' LIMIT 1;
```
Create the `/etc/postfix/pgsql-transport-maps.cf` file with the following content.
Again, make sure that the database config is correctly set: replace `mydomain.com` with your domain and update `myuser` and `mypassword` with your Postgres credentials.
```
# postgres config
hosts = localhost
user = myuser
password = mypassword
dbname = simplelogin
# forward to smtp:127.0.0.1:20381 for custom domain AND email domain
query = SELECT 'smtp:127.0.0.1:20381' FROM custom_domain WHERE domain = '%s' AND verified=true
UNION SELECT 'smtp:127.0.0.1:20381' WHERE '%s' = 'mydomain.com' LIMIT 1;
```
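Before restarting Postfix, you can optionally test both lookup maps with `postmap`; this assumes the Postgres container from the previous section is running and reachable on localhost:
```bash
# should print your domain if the relay-domains map works
sudo postmap -q mydomain.com pgsql:/etc/postfix/pgsql-relay-domains.cf
# should print smtp:127.0.0.1:20381 if the transport map works
sudo postmap -q mydomain.com pgsql:/etc/postfix/pgsql-transport-maps.cf
```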
Finally, restart Postfix
```bash
sudo systemctl restart postfix
```
### Run SimpleLogin Docker containers
To run SimpleLogin, you need a config file at `$(pwd)/simplelogin.env`. Below is an example that you can use right away; make sure to:
- replace `mydomain.com` with your domain,
- set `FLASK_SECRET` to a secret string (see the note after the config example for one way to generate it),
- update `myuser` and `mypassword` with the database credentials used in the previous step.
All possible parameters can be found in [config example](example.env). Some are optional and are commented out by default.
Some have "dummy" values, fill them up if you want to enable these features (Paddle, AWS, etc).
```.env
# WebApp URL
URL=http://app.mydomain.com
# domain used to create alias
EMAIL_DOMAIN=mydomain.com
# transactional email is sent from this email address
SUPPORT_EMAIL=support@mydomain.com
# custom domain needs to point to these MX servers
EMAIL_SERVERS_WITH_PRIORITY=[(10, "app.mydomain.com.")]
# By default, new aliases must end with ".{random_word}". This is to avoid a person taking all "nice" aliases.
# This option doesn't make sense for self-hosted setups. Set this variable to disable it.
DISABLE_ALIAS_SUFFIX=1
# the DKIM private key used to compute DKIM-Signature
DKIM_PRIVATE_KEY_PATH=/dkim.key
# DB Connection
DB_URI=postgresql://myuser:mypassword@sl-db:5432/simplelogin
FLASK_SECRET=put_something_secret_here
GNUPGHOME=/sl/pgp
LOCAL_FILE_UPLOAD=1
POSTFIX_SERVER=10.0.0.1
```
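For `FLASK_SECRET`, any sufficiently long random string works; one way to generate one is:
```bash
openssl rand -hex 32
```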
Before running the webapp, you need to prepare the database by running the migration:
```bash
docker run --rm \
--name sl-migration \
-v $(pwd)/sl:/sl \
-v $(pwd)/sl/upload:/code/static/upload \
-v $(pwd)/dkim.key:/dkim.key \
-v $(pwd)/dkim.pub.key:/dkim.pub.key \
-v $(pwd)/simplelogin.env:/code/.env \
--network="sl-network" \
simplelogin/app:3.4.0 flask db upgrade
```
This command could take a while to download the `simplelogin/app` docker image.
Initialize the data:
```bash
docker run --rm \
--name sl-init \
-v $(pwd)/sl:/sl \
-v $(pwd)/simplelogin.env:/code/.env \
-v $(pwd)/dkim.key:/dkim.key \
-v $(pwd)/dkim.pub.key:/dkim.pub.key \
--network="sl-network" \
simplelogin/app:3.4.0 python init_app.py
```
Now, it's time to run the `webapp` container!
```bash
docker run -d \
--name sl-app \
-v $(pwd)/sl:/sl \
-v $(pwd)/sl/upload:/code/static/upload \
-v $(pwd)/simplelogin.env:/code/.env \
-v $(pwd)/dkim.key:/dkim.key \
-v $(pwd)/dkim.pub.key:/dkim.pub.key \
-p 127.0.0.1:7777:7777 \
--restart always \
--network="sl-network" \
simplelogin/app:3.4.0
```
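To quickly check that the webapp is up, you can query it locally; it may take a few seconds to start, and you should get an HTTP response (typically a redirect to the login page):
```bash
curl -I http://localhost:7777
```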
Next, run the `email handler`:
```bash
docker run -d \
--name sl-email \
-v $(pwd)/sl:/sl \
-v $(pwd)/sl/upload:/code/static/upload \
-v $(pwd)/simplelogin.env:/code/.env \
-v $(pwd)/dkim.key:/dkim.key \
-v $(pwd)/dkim.pub.key:/dkim.pub.key \
-p 127.0.0.1:20381:20381 \
--restart always \
--network="sl-network" \
simplelogin/app:3.4.0 python email_handler.py
```
And finally, run the `job runner`:
```bash
docker run -d \
--name sl-job-runner \
-v $(pwd)/sl:/sl \
-v $(pwd)/sl/upload:/code/static/upload \
-v $(pwd)/simplelogin.env:/code/.env \
-v $(pwd)/dkim.key:/dkim.key \
-v $(pwd)/dkim.pub.key:/dkim.pub.key \
--restart always \
--network="sl-network" \
simplelogin/app:3.4.0 python job_runner.py
```
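At this point the webapp, email handler and job runner containers should all be running. A quick way to confirm, and to peek at the logs of one of them, for example:
```bash
docker ps --filter name=sl-
docker logs --tail 20 sl-email
```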
### Nginx
Install Nginx:
```bash
sudo apt-get install -y nginx
```
Then, create `/etc/nginx/sites-enabled/simplelogin` with the following lines (make sure to replace `mydomain.com` with your domain):
```nginx
server {
server_name app.mydomain.com;
location / {
proxy_pass http://localhost:7777;
}
}
```
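It's a good habit to validate the configuration before reloading Nginx:
```bash
sudo nginx -t
```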
Reload Nginx with the command below
```bash
sudo systemctl reload nginx
```
At this step, you should also set up SSL for Nginx. [Here's our guide on how to do it](./docs/ssl.md).
### Enjoy!
If all the above steps are successful, open http://app.mydomain.com/ and create your first account!
By default, new accounts are not premium, so they don't have unlimited aliases. To make your account premium,
please go to the database, table "users", and set the "lifetime" column to "1" or "TRUE":
```
docker exec -it sl-db psql -U myuser simplelogin
UPDATE users SET lifetime = TRUE;
exit
```
Once you've created all your desired login accounts, add these lines to your `simplelogin.env` file to disable further registrations:
```
DISABLE_REGISTRATION=1
DISABLE_ONBOARDING=true
```
Then restart the web app to apply the changes: `docker restart sl-app`
### Donations Welcome
You don't have to pay anything to SimpleLogin to use all its features.
If you like the project, you can make a donation on our Open Collective page at https://opencollective.com/simplelogin
### Misc
The above self-hosting instructions correspond to a fresh Ubuntu server and don't cover all possible server configurations.
Below are pointers to different topics:
- [Troubleshooting](docs/troubleshooting.md)
- [Enable SSL](docs/ssl.md)
- [UFW - uncomplicated firewall](docs/ufw.md)
- [SES - Amazon Simple Email Service](docs/ses.md)
- [Upgrade existing SimpleLogin installation](docs/upgrade.md)
- [Enforce SPF](docs/enforce-spf.md)
- [Postfix TLS](docs/postfix-tls.md)
## ❤️ Contributors
Thanks go to these wonderful people:
<table>
<tr>
<td align="center"><a href="https://www.linkedin.com/in/vandungnguyen/"><img src="https://simplelogin.io/about/dung.jpg" width="100px;" alt="Dung Nguyen Van"/><br /><sub><b>Dung Nguyen Van</b></sub></a><br /></td>
<td align="center"><a href="https://www.linkedin.com/in/giuseppe-f-83449ba4/"><img src="https://simplelogin.io/about/giuseppe.jpeg" width="100px;" alt="Giuseppe Federico"/><br /><sub><b>Giuseppe Federico</b></sub></a><br /></td>
<td align="center"><a href="https://github.com/NinhDinh"><img src="https://avatars2.githubusercontent.com/u/1419742?s=460&v=4" width="100px;" alt="Ninh Dinh"/><br /><sub><b>Ninh Dinh</b></sub></a><br /></td>
<td align="center"><a href="https://github.com/ntung"><img src="https://avatars1.githubusercontent.com/u/663341?s=460&v=4" width="100px;" alt="Tung Nguyen V. N."/><br /><sub><b>Tung Nguyen V. N.</b></sub></a><br /></td>
<td align="center"><a href="https://www.linkedin.com/in/nguyenkims/"><img src="https://simplelogin.io/about/me.jpeg" width="100px;" alt="Son Nguyen Kim"/><br /><sub><b>Son Nguyen Kim</b></sub></a><br /></td>
<td align="center"><a href="https://github.com/developStorm"><img src="https://avatars1.githubusercontent.com/u/59678453?s=460&u=3813d29a125b3edeb44019234672b704f7b9b76a&v=4" width="100px;" alt="Raymond Nook"/><br /><sub><b>Raymond Nook</b></sub></a><br /></td>
<td align="center"><a href="https://github.com/SibrenVasse"><img src="https://avatars1.githubusercontent.com/u/5833571?s=460&u=78aea62ffc215885a0319437fc629a7596ddea31&v=4" width="100px;" alt="Sibren Vasse"/><br /><sub><b>Sibren Vasse</b></sub></a><br /></td>
<td align="center"><a href="https://github.com/TheLastProject"><img src="https://avatars.githubusercontent.com/u/1885159?s=460&u=ebeeb346c4083c0d493a134f4774f925d3437f98&v=4" width="100px;" alt="Sylvia van Os"/><br /><sub><b>Sylvia van Os</b></sub></a><br /></td>
</tr>
</table>
14
app/SECURITY.md Normal file
View File
@ -0,0 +1,14 @@
# Security Policy
## Supported Versions
We only add security updates to the latest MAJOR.MINOR version of the project. No security updates are backported to previous versions.
If you want to be up to date on security patches, make sure your SimpleLogin image is up to date.
## Reporting a Vulnerability
If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
We will review the potential threat and fix it as fast as we can.
We are incredibly thankful to people who disclose vulnerabilities; unfortunately, we do not have a bounty program in place yet.
83
app/alembic.ini Normal file
View File
@ -0,0 +1,83 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration files
file_template = %%(year)d_%%(month).2d%%(day).2d%%(hour).2d_%%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
0
app/app/__init__.py Normal file
View File
288
app/app/account_linking.py Normal file
View File
@ -0,0 +1,288 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from arrow import Arrow
from newrelic import agent
from app.db import Session
from app.email_utils import send_welcome_email
from app.utils import sanitize_email
from app.errors import AccountAlreadyLinkedToAnotherPartnerException
from app.log import LOG
from app.models import (
PartnerSubscription,
Partner,
PartnerUser,
User,
)
from app.utils import random_string
class SLPlanType(Enum):
Free = 1
Premium = 2
@dataclass
class SLPlan:
type: SLPlanType
expiration: Optional[Arrow]
@dataclass
class PartnerLinkRequest:
name: str
email: str
external_user_id: str
plan: SLPlan
from_partner: bool
@dataclass
class LinkResult:
user: User
strategy: str
def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
if plan.type == SLPlanType.Free:
if sub is not None:
LOG.i(
f"Deleting partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
)
PartnerSubscription.delete(sub.id)
agent.record_custom_event("PlanChange", {"plan": "free"})
else:
if sub is None:
LOG.i(
f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
)
PartnerSubscription.create(
partner_user_id=partner_user.id,
end_at=plan.expiration,
)
agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
else:
if sub.end_at != plan.expiration:
LOG.i(
f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
)
agent.record_custom_event(
"PlanChange", {"plan": "premium", "type": "extension"}
)
sub.end_at = plan.expiration
Session.commit()
def set_plan_for_user(user: User, plan: SLPlan, partner: Partner):
partner_user = PartnerUser.get_by(partner_id=partner.id, user_id=user.id)
if partner_user is None:
return
return set_plan_for_partner_user(partner_user, plan)
def ensure_partner_user_exists_for_user(
link_request: PartnerLinkRequest, sl_user: User, partner: Partner
) -> PartnerUser:
# Find partner_user by user_id
res = PartnerUser.get_by(user_id=sl_user.id)
if res and res.partner_id != partner.id:
raise AccountAlreadyLinkedToAnotherPartnerException()
if not res:
res = PartnerUser.create(
user_id=sl_user.id,
partner_id=partner.id,
partner_email=link_request.email,
external_user_id=link_request.external_user_id,
)
Session.commit()
LOG.i(
f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
)
return res
class ClientMergeStrategy(ABC):
def __init__(
self,
link_request: PartnerLinkRequest,
user: Optional[User],
partner: Partner,
):
if self.__class__ == ClientMergeStrategy:
raise RuntimeError("Cannot directly instantiate a ClientMergeStrategy")
self.link_request = link_request
self.user = user
self.partner = partner
@abstractmethod
def process(self) -> LinkResult:
pass
class NewUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
# Will create a new SL User with a random password
new_user = User.create(
email=self.link_request.email,
name=self.link_request.name,
password=random_string(20),
activated=True,
from_partner=self.link_request.from_partner,
)
partner_user = PartnerUser.create(
user_id=new_user.id,
partner_id=self.partner.id,
external_user_id=self.link_request.external_user_id,
partner_email=self.link_request.email,
)
LOG.i(
f"Created new user for login request for partner:{self.partner.id} external_user_id:{self.link_request.external_user_id}. New user {new_user.id} partner_user:{partner_user.id}"
)
set_plan_for_partner_user(
partner_user,
self.link_request.plan,
)
Session.commit()
if not new_user.created_by_partner:
send_welcome_email(new_user)
agent.record_custom_event("PartnerUserCreation", {"partner": self.partner.name})
return LinkResult(
user=new_user,
strategy=self.__class__.__name__,
)
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
partner_user = ensure_partner_user_exists_for_user(
self.link_request, self.user, self.partner
)
set_plan_for_partner_user(partner_user, self.link_request.plan)
return LinkResult(
user=self.user,
strategy=self.__class__.__name__,
)
class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
raise AccountAlreadyLinkedToAnotherPartnerException()
def get_login_strategy(
link_request: PartnerLinkRequest, user: Optional[User], partner: Partner
) -> ClientMergeStrategy:
if user is None:
# We couldn't find any SimpleLogin user with the requested e-mail
return NewUserStrategy(link_request, user, partner)
# Check if user is already linked with another partner_user
other_partner_user = PartnerUser.get_by(partner_id=partner.id, user_id=user.id)
if other_partner_user is not None:
return LinkedWithAnotherPartnerUserStrategy(link_request, user, partner)
# There is a SimpleLogin user with the partner_user's e-mail
return ExistingUnlinkedUserStrategy(link_request, user, partner)
def process_login_case(
link_request: PartnerLinkRequest, partner: Partner
) -> LinkResult:
# Sanitize email just in case
link_request.email = sanitize_email(link_request.email)
# Try to find a SimpleLogin user registered with that partner user id
partner_user = PartnerUser.get_by(
partner_id=partner.id, external_user_id=link_request.external_user_id
)
if partner_user is None:
# We didn't find any SimpleLogin user registered with that partner user id
# Try to find it using the partner's e-mail address
user = User.get_by(email=link_request.email)
return get_login_strategy(link_request, user, partner).process()
else:
# We found the SL user registered with that partner user id
# We're done
set_plan_for_partner_user(partner_user, link_request.plan)
# It's the same user. No need to do anything
return LinkResult(
user=partner_user.user,
strategy="Link",
)
def link_user(
link_request: PartnerLinkRequest, current_user: User, partner: Partner
) -> LinkResult:
# Sanitize email just in case
link_request.email = sanitize_email(link_request.email)
partner_user = ensure_partner_user_exists_for_user(
link_request, current_user, partner
)
set_plan_for_partner_user(partner_user, link_request.plan)
agent.record_custom_event("AccountLinked", {"partner": partner.name})
Session.commit()
return LinkResult(
user=current_user,
strategy="Link",
)
def switch_already_linked_user(
link_request: PartnerLinkRequest, partner_user: PartnerUser, current_user: User
):
# Find if the user has another link and unlink it
other_partner_user = PartnerUser.get_by(
user_id=current_user.id,
partner_id=partner_user.partner_id,
)
if other_partner_user is not None:
LOG.i(
f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
)
PartnerUser.delete(other_partner_user.id)
LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
# Link this partner_user to the current user
partner_user.user_id = current_user.id
# Set plan
set_plan_for_partner_user(partner_user, link_request.plan)
Session.commit()
return LinkResult(
user=current_user,
strategy="Link",
)
def process_link_case(
link_request: PartnerLinkRequest,
current_user: User,
partner: Partner,
) -> LinkResult:
# Sanitize email just in case
link_request.email = sanitize_email(link_request.email)
# Try to find a SimpleLogin user linked with this Partner account
partner_user = PartnerUser.get_by(
partner_id=partner.id, external_user_id=link_request.external_user_id
)
if partner_user is None:
# There is no SL user linked with the partner. Proceed with linking
return link_user(link_request, current_user, partner)
# There is a SL user registered with the partner. Check if is the current one
if partner_user.user_id == current_user.id:
# Update plan
set_plan_for_partner_user(partner_user, link_request.plan)
# It's the same user. No need to do anything
return LinkResult(
user=current_user,
strategy="Link",
)
else:
return switch_already_linked_user(link_request, partner_user, current_user)
622
app/app/admin_model.py Normal file
View File
@ -0,0 +1,622 @@
from typing import Optional
import arrow
import sqlalchemy
from flask_admin.model.template import EndpointLinkRowAction
from markupsafe import Markup
from app import models, s3
from flask import redirect, url_for, request, flash, Response
from flask_admin import expose, AdminIndexView
from flask_admin.actions import action
from flask_admin.contrib import sqla
from flask_login import current_user
from app.db import Session
from app.models import (
User,
ManualSubscription,
Fido,
Subscription,
AppleSubscription,
AdminAuditLog,
AuditLogActionEnum,
ProviderComplaintState,
Phase,
ProviderComplaint,
Alias,
Newsletter,
PADDLE_SUBSCRIPTION_GRACE_DAYS,
)
from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
class SLModelView(sqla.ModelView):
column_default_sort = ("id", True)
column_display_pk = True
page_size = 100
can_edit = False
can_create = False
can_delete = False
edit_modal = True
def is_accessible(self):
return current_user.is_authenticated and current_user.is_admin
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
return redirect(url_for("auth.login", next=request.url))
def on_model_change(self, form, model, is_created):
changes = {}
for attr in sqlalchemy.inspect(model).attrs:
if attr.history.has_changes() and attr.key not in (
"created_at",
"updated_at",
):
value = attr.value
# If it's a model reference, get the source id
if issubclass(type(value), models.Base):
value = value.id
# otherwise, if its a generic object stringify it
if issubclass(type(value), object):
value = str(value)
changes[attr.key] = value
auditAction = (
AuditLogActionEnum.create_object
if is_created
else AuditLogActionEnum.update_object
)
AdminAuditLog.create(
admin_user_id=current_user.id,
model=model.__class__.__name__,
model_id=model.id,
action=auditAction.value,
data=changes,
)
def on_model_delete(self, model):
AdminAuditLog.create(
admin_user_id=current_user.id,
model=model.__class__.__name__,
model_id=model.id,
action=AuditLogActionEnum.delete_object.value,
)
class SLAdminIndexView(AdminIndexView):
@expose("/")
def index(self):
if not current_user.is_authenticated or not current_user.is_admin:
return redirect(url_for("auth.login", next=request.url))
return redirect("/admin/user")
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
class UserAdmin(SLModelView):
column_searchable_list = ["email", "id"]
column_exclude_list = [
"salt",
"password",
"otp_secret",
"last_otp",
"fido_uuid",
"profile_picture",
]
can_edit = False
def scaffold_list_columns(self):
ret = super().scaffold_list_columns()
ret.insert(0, "upgrade_channel")
return ret
column_formatters = {
"upgrade_channel": _user_upgrade_channel_formatter,
}
@action(
"disable_user",
"Disable user",
"Are you sure you want to disable the selected users?",
)
def action_disable_user(self, ids):
for user in User.filter(User.id.in_(ids)):
user.disabled = True
flash(f"Disabled user {user.id}")
AdminAuditLog.disable_user(current_user.id, user.id)
Session.commit()
@action(
"enable_user",
"Enable user",
"Are you sure you want to enable the selected users?",
)
def action_enable_user(self, ids):
for user in User.filter(User.id.in_(ids)):
user.disabled = False
flash(f"Enabled user {user.id}")
AdminAuditLog.enable_user(current_user.id, user.id)
Session.commit()
@action(
"education_upgrade",
"Education upgrade",
"Are you sure you want to edu-upgrade selected users?",
)
def action_edu_upgrade(self, ids):
manual_upgrade("Edu", ids, is_giveaway=True)
@action(
"charity_org_upgrade",
"Charity Organization upgrade",
"Are you sure you want to upgrade selected users using the Charity organization program?",
)
def action_charity_org_upgrade(self, ids):
manual_upgrade("Charity Organization", ids, is_giveaway=True)
@action(
"journalist_upgrade",
"Journalist upgrade",
"Are you sure you want to upgrade selected users using the Journalist program?",
)
def action_journalist_upgrade(self, ids):
manual_upgrade("Journalist", ids, is_giveaway=True)
@action(
"cash_upgrade",
"Cash upgrade",
"Are you sure you want to cash-upgrade selected users?",
)
def action_cash_upgrade(self, ids):
manual_upgrade("Cash", ids, is_giveaway=False)
@action(
"crypto_upgrade",
"Crypto upgrade",
"Are you sure you want to crypto-upgrade selected users?",
)
def action_monero_upgrade(self, ids):
manual_upgrade("Crypto", ids, is_giveaway=False)
@action(
"adhoc_upgrade",
"Adhoc upgrade - for exceptional case",
"Are you sure you want to crypto-upgrade selected users?",
)
def action_adhoc_upgrade(self, ids):
manual_upgrade("Adhoc", ids, is_giveaway=False)
@action(
"extend_trial_1w",
"Extend trial for 1 week more",
"Extend trial for 1 week more?",
)
def extend_trial_1w(self, ids):
for user in User.filter(User.id.in_(ids)):
if user.trial_end and user.trial_end > arrow.now():
user.trial_end = user.trial_end.shift(weeks=1)
else:
user.trial_end = arrow.now().shift(weeks=1)
flash(f"Extend trial for {user} to {user.trial_end}", "success")
AdminAuditLog.extend_trial(
current_user.id, user.id, user.trial_end, "1 week"
)
Session.commit()
@action(
"disable_otp_fido",
"Disable OTP & FIDO",
"Disable OTP & FIDO?",
)
def disable_otp_fido(self, ids):
for user in User.filter(User.id.in_(ids)):
user_had_otp = user.enable_otp
if user.enable_otp:
user.enable_otp = False
flash(f"Disable OTP for {user}", "info")
user_had_fido = user.fido_uuid is not None
if user.fido_uuid:
Fido.filter_by(uuid=user.fido_uuid).delete()
user.fido_uuid = None
flash(f"Disable FIDO for {user}", "info")
AdminAuditLog.disable_otp_fido(
current_user.id, user.id, user_had_otp, user_had_fido
)
Session.commit()
@action(
"stop_paddle_sub",
"Stop user Paddle subscription",
"This will stop the current user Paddle subscription so if user doesn't have Proton sub, they will lose all SL benefits immediately",
)
def stop_paddle_sub(self, ids):
for user in User.filter(User.id.in_(ids)):
sub: Subscription = user.get_paddle_subscription()
if not sub:
flash(f"No Paddle sub for {user}", "warning")
continue
flash(f"{user} sub will end now, instead of {sub.next_bill_date}", "info")
sub.next_bill_date = (
arrow.now().shift(days=-PADDLE_SUBSCRIPTION_GRACE_DAYS).date()
)
Session.commit()
# @action(
# "login_as",
# "Login as this user",
# "Login as this user?",
# )
# def login_as(self, ids):
# if len(ids) != 1:
# flash("only 1 user can be selected", "error")
# return
#
# for user in User.filter(User.id.in_(ids)):
# AdminAuditLog.logged_as_user(current_user.id, user.id)
# login_user(user)
# flash(f"Login as user {user}", "success")
# return redirect("/")
def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
for user in User.filter(User.id.in_(ids)).all():
if user.lifetime:
flash(f"user {user} already has a lifetime license", "warning")
continue
sub: Subscription = user.get_paddle_subscription()
if sub and not sub.cancelled:
flash(
f"user {user} already has a Paddle license, they have to cancel it first",
"warning",
)
continue
apple_sub: AppleSubscription = AppleSubscription.get_by(user_id=user.id)
if apple_sub and apple_sub.is_valid():
flash(
f"user {user} already has a Apple subscription, they have to cancel it first",
"warning",
)
continue
AdminAuditLog.create_manual_upgrade(current_user.id, way, user.id, is_giveaway)
manual_sub: ManualSubscription = ManualSubscription.get_by(user_id=user.id)
if manual_sub:
# renew existing subscription
if manual_sub.end_at > arrow.now():
manual_sub.end_at = manual_sub.end_at.shift(years=1)
else:
manual_sub.end_at = arrow.now().shift(years=1, days=1)
flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
continue
ManualSubscription.create(
user_id=user.id,
end_at=arrow.now().shift(years=1, days=1),
comment=way,
is_giveaway=is_giveaway,
)
flash(f"New {way} manual subscription for {user} is created", "success")
Session.commit()
class EmailLogAdmin(SLModelView):
column_searchable_list = ["id"]
column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"]
can_edit = False
can_create = False
class AliasAdmin(SLModelView):
column_searchable_list = ["id", "user.email", "email", "mailbox.email"]
column_filters = ["id", "user.email", "email", "mailbox.email"]
@action(
"disable_email_spoofing_check",
"Disable email spoofing protection",
"Disable email spoofing protection?",
)
def disable_email_spoofing_check_for(self, ids):
for alias in Alias.filter(Alias.id.in_(ids)):
if alias.disable_email_spoofing_check:
flash(
f"Email spoofing protection is already disabled on {alias.email}",
"warning",
)
else:
alias.disable_email_spoofing_check = True
flash(
f"Email spoofing protection is disabled on {alias.email}", "success"
)
Session.commit()
class MailboxAdmin(SLModelView):
column_searchable_list = ["id", "user.email", "email"]
column_filters = ["id", "user.email", "email"]
# class LifetimeCouponAdmin(SLModelView):
# can_edit = True
# can_create = True
class CouponAdmin(SLModelView):
can_edit = False
can_create = True
class ManualSubscriptionAdmin(SLModelView):
can_edit = True
column_searchable_list = ["id", "user.email"]
@action(
"extend_1y",
"Extend for 1 year",
"Extend 1 year more?",
)
def extend_1y(self, ids):
for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
ms.end_at = ms.end_at.shift(years=1)
flash(f"Extend subscription for 1 year for {ms.user}", "success")
AdminAuditLog.extend_subscription(
current_user.id, ms.user.id, ms.end_at, "1 year"
)
Session.commit()
@action(
"extend_1m",
"Extend for 1 month",
"Extend 1 month more?",
)
def extend_1m(self, ids):
for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
ms.end_at = ms.end_at.shift(months=1)
flash(f"Extend subscription for 1 month for {ms.user}", "success")
AdminAuditLog.extend_subscription(
current_user.id, ms.user.id, ms.end_at, "1 month"
)
Session.commit()
# class ClientAdmin(SLModelView):
# column_searchable_list = ["name", "description", "user.email"]
# column_exclude_list = ["oauth_client_secret", "home_url"]
# can_edit = True
class CustomDomainAdmin(SLModelView):
column_searchable_list = ["domain", "user.email", "user.id"]
column_exclude_list = ["ownership_txt_token"]
can_edit = False
class ReferralAdmin(SLModelView):
column_searchable_list = ["id", "user.email", "code", "name"]
column_filters = ["id", "user.email", "code", "name"]
def scaffold_list_columns(self):
ret = super().scaffold_list_columns()
ret.insert(0, "nb_user")
ret.insert(0, "nb_paid_user")
return ret
# class PayoutAdmin(SLModelView):
# column_searchable_list = ["id", "user.email"]
# column_filters = ["id", "user.email"]
# can_edit = True
# can_create = True
# can_delete = True
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_created_at_formatter(view, context, model, name):
return model.created_at.format()
class AdminAuditLogAdmin(SLModelView):
column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"]
column_filters = ["admin.id", "admin.email", "model_id", "created_at"]
column_exclude_list = ["id"]
column_hide_backrefs = False
can_edit = False
can_create = False
can_delete = False
column_formatters = {
"action": _admin_action_formatter,
"created_at": _admin_created_at_formatter,
}
def _transactionalcomplaint_state_formatter(view, context, model, name):
return "{} ({})".format(ProviderComplaintState(model.state).name, model.state)
def _transactionalcomplaint_phase_formatter(view, context, model, name):
return Phase(model.phase).name
def _transactionalcomplaint_refused_email_id_formatter(view, context, model, name):
markupstring = "<a href='{}'>{}</a>".format(
url_for(".download_eml", id=model.id), model.refused_email.full_report_path
)
return Markup(markupstring)
class ProviderComplaintAdmin(SLModelView):
column_searchable_list = ["id", "user.id", "created_at"]
column_filters = ["user.id", "state"]
column_hide_backrefs = False
can_edit = False
can_create = False
can_delete = False
column_formatters = {
"created_at": _admin_created_at_formatter,
"updated_at": _admin_created_at_formatter,
"state": _transactionalcomplaint_state_formatter,
"phase": _transactionalcomplaint_phase_formatter,
"refused_email": _transactionalcomplaint_refused_email_id_formatter,
}
column_extra_row_actions = [ # Add a new action button
EndpointLinkRowAction("fa fa-check-square", ".mark_ok"),
]
def _get_complaint(self) -> Optional[ProviderComplaint]:
complain_id = request.args.get("id")
if complain_id is None:
flash("Missing id", "error")
return None
complaint = ProviderComplaint.get_by(id=complain_id)
if not complaint:
flash("Could not find complaint", "error")
return None
return complaint
@expose("/mark_ok", methods=["GET"])
def mark_ok(self):
complaint = self._get_complaint()
if not complaint:
return redirect("/admin/transactionalcomplaint/")
complaint.state = ProviderComplaintState.reviewed.value
Session.commit()
return redirect("/admin/transactionalcomplaint/")
@expose("/download_eml", methods=["GET"])
def download_eml(self):
complaint = self._get_complaint()
if not complaint:
return redirect("/admin/transactionalcomplaint/")
eml_path = complaint.refused_email.full_report_path
eml_data = s3.download_email(eml_path)
AdminAuditLog.downloaded_provider_complaint(current_user.id, complaint.id)
Session.commit()
return Response(
eml_data,
mimetype="message/rfc822",
headers={
"Content-Disposition": "attachment;filename={}".format(
complaint.refused_email.path
)
},
)
def _newsletter_plain_text_formatter(view, context, model: Newsletter, name):
# to display newsletter plain_text with linebreaks in the list view
return Markup(model.plain_text.replace("\n", "<br>"))
def _newsletter_html_formatter(view, context, model: Newsletter, name):
# to display newsletter html with linebreaks in the list view
return Markup(model.html.replace("\n", "<br>"))
class NewsletterAdmin(SLModelView):
list_template = "admin/model/newsletter-list.html"
edit_template = "admin/model/newsletter-edit.html"
edit_modal = False
can_edit = True
can_create = True
column_formatters = {
"plain_text": _newsletter_plain_text_formatter,
"html": _newsletter_html_formatter,
}
@action(
"send_newsletter_to_user",
"Send this newsletter to myself or the specified userID",
)
def send_newsletter_to_user(self, newsletter_ids):
user_id = request.form["user_id"]
if user_id:
user = User.get(user_id)
if not user:
flash(f"No such user with ID {user_id}", "error")
return
else:
flash("use the current user", "info")
user = current_user
for newsletter_id in newsletter_ids:
newsletter = Newsletter.get(newsletter_id)
sent, error_msg = send_newsletter_to_user(newsletter, user)
if sent:
flash(f"{newsletter} sent to {user}", "success")
else:
flash(error_msg, "error")
@action(
"send_newsletter_to_address",
"Send this newsletter to a specific address",
)
def send_newsletter_to_address(self, newsletter_ids):
to_address = request.form["to_address"]
if not to_address:
flash("to_address missing", "error")
return
for newsletter_id in newsletter_ids:
newsletter = Newsletter.get(newsletter_id)
# use the current_user for rendering email
sent, error_msg = send_newsletter_to_address(
newsletter, current_user, to_address
)
if sent:
flash(
f"{newsletter} sent to {to_address} with {current_user} context",
"success",
)
else:
flash(error_msg, "error")
class NewsletterUserAdmin(SLModelView):
column_searchable_list = ["id"]
column_filters = ["id", "user.email", "newsletter.subject"]
column_exclude_list = ["created_at", "updated_at", "id"]
can_edit = False
can_create = False
class DailyMetricAdmin(SLModelView):
column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True
class MetricAdmin(SLModelView):
column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True
162
app/app/alias_suffix.py Normal file
View File
@ -0,0 +1,162 @@
from __future__ import annotations
import json
from dataclasses import asdict, dataclass
from typing import Optional
import itsdangerous
from app import config
from app.log import LOG
from app.models import User
signer = itsdangerous.TimestampSigner(config.CUSTOM_ALIAS_SECRET)
@dataclass
class AliasSuffix:
# whether this is a custom domain
is_custom: bool
# Suffix
suffix: str
# Suffix signature
signed_suffix: str
# whether this is a premium SL domain. Not apply to custom domain
is_premium: bool
# can be either Custom or SL domain
domain: str
# if custom domain, whether the custom domain has MX verified, i.e. can receive emails
mx_verified: bool = True
def serialize(self):
return json.dumps(asdict(self))
@classmethod
def deserialize(cls, data: str) -> AliasSuffix:
return AliasSuffix(**json.loads(data))
def check_suffix_signature(signed_suffix: str) -> Optional[str]:
# hypothesis: the user will click on the button within the 600 secs
try:
return signer.unsign(signed_suffix, max_age=600).decode()
except itsdangerous.BadSignature:
return None
def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
"""verify if user could create an alias with the given prefix and suffix"""
if not alias_prefix or not alias_suffix: # should be caught on frontend
return False
user_custom_domains = [cd.domain for cd in user.verified_custom_domains()]
# make sure alias_suffix is either .random_word@simplelogin.co or @my-domain.com
alias_suffix = alias_suffix.strip()
# alias_domain_prefix is either a .random_word or ""
alias_domain_prefix, alias_domain = alias_suffix.split("@", 1)
# alias_domain must be either one of user custom domains or built-in domains
if alias_domain not in user.available_alias_domains():
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False
# SimpleLogin domain case:
# 1) alias_suffix must start with "." and
# 2) alias_domain_prefix must come from the word list
if (
alias_domain in user.available_sl_domains()
and alias_domain not in user_custom_domains
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
and not config.DISABLE_ALIAS_SUFFIX
):
if not alias_domain_prefix.startswith("."):
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
return False
else:
if alias_domain not in user_custom_domains:
if not config.DISABLE_ALIAS_SUFFIX:
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False
if alias_domain not in user.available_sl_domains():
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False
return True
def get_alias_suffixes(user: User) -> [AliasSuffix]:
"""
Similar to get_available_suffixes() but also returns custom domains that don't have MX set up.
"""
user_custom_domains = user.verified_custom_domains()
alias_suffixes: [AliasSuffix] = []
# put custom domain first
# for each user domain, generate both the domain and a random suffix version
for custom_domain in user_custom_domains:
if custom_domain.random_prefix_generation:
suffix = "." + user.get_random_alias_suffix() + "@" + custom_domain.domain
alias_suffix = AliasSuffix(
is_custom=True,
suffix=suffix,
signed_suffix=signer.sign(suffix).decode(),
is_premium=False,
domain=custom_domain.domain,
mx_verified=custom_domain.verified,
)
if user.default_alias_custom_domain_id == custom_domain.id:
alias_suffixes.insert(0, alias_suffix)
else:
alias_suffixes.append(alias_suffix)
suffix = "@" + custom_domain.domain
alias_suffix = AliasSuffix(
is_custom=True,
suffix=suffix,
signed_suffix=signer.sign(suffix).decode(),
is_premium=False,
domain=custom_domain.domain,
mx_verified=custom_domain.verified,
)
# put the default domain to top
# only if random_prefix_generation isn't enabled
if (
user.default_alias_custom_domain_id == custom_domain.id
and not custom_domain.random_prefix_generation
):
alias_suffixes.insert(0, alias_suffix)
else:
alias_suffixes.append(alias_suffix)
# then SimpleLogin domain
for sl_domain in user.get_sl_domains():
suffix = (
(
""
if config.DISABLE_ALIAS_SUFFIX
else "." + user.get_random_alias_suffix()
)
+ "@"
+ sl_domain.domain
)
alias_suffix = AliasSuffix(
is_custom=False,
suffix=suffix,
signed_suffix=signer.sign(suffix).decode(),
is_premium=sl_domain.premium_only,
domain=sl_domain.domain,
mx_verified=True,
)
# put the default domain to top
if user.default_alias_public_domain_id == sl_domain.id:
alias_suffixes.insert(0, alias_suffix)
else:
alias_suffixes.append(alias_suffix)
return alias_suffixes
399
app/app/alias_utils.py Normal file
View File
@ -0,0 +1,399 @@
import csv
from io import StringIO
import re
from typing import Optional, Tuple
from email_validator import validate_email, EmailNotValidError
from sqlalchemy.exc import IntegrityError, DataError
from flask import make_response
from app.config import (
BOUNCE_PREFIX_FOR_REPLY_PHASE,
BOUNCE_PREFIX,
BOUNCE_SUFFIX,
VERP_PREFIX,
)
from app.db import Session
from app.email_utils import (
get_email_domain_part,
send_cannot_create_directory_alias,
can_create_directory_for_address,
send_cannot_create_directory_alias_disabled,
get_email_local_part,
send_cannot_create_domain_alias,
)
from app.errors import AliasInTrashError
from app.log import LOG
from app.models import (
Alias,
CustomDomain,
Directory,
User,
DeletedAlias,
DomainDeletedAlias,
AliasMailbox,
Mailbox,
EmailLog,
Contact,
AutoCreateRule,
)
from app.regex_utils import regex_match
def get_user_if_alias_would_auto_create(
address: str, notify_user: bool = False
) -> Optional[User]:
banned_prefix = f"{VERP_PREFIX}."
if address.startswith(banned_prefix):
LOG.w("alias %s can't start with %s", address, banned_prefix)
return None
try:
# Prevent addresses with unicode characters (🤯) in them for now.
validate_email(address, check_deliverability=False, allow_smtputf8=False)
except EmailNotValidError:
return None
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
address, notify_user=notify_user
)
if domain_and_rule:
return domain_and_rule[0].user
directory = check_if_alias_can_be_auto_created_for_a_directory(
address, notify_user=notify_user
)
if directory:
return directory.user
return None
def check_if_alias_can_be_auto_created_for_custom_domain(
address: str, notify_user: bool = True
) -> Optional[Tuple[CustomDomain, Optional[AutoCreateRule]]]:
"""
Check if this address would generate an auto created alias.
If that's the case return the domain that would create it and the rule that triggered it.
If there's no rule it's a catchall creation
"""
alias_domain = get_email_domain_part(address)
custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain)
if not custom_domain:
return None
user: User = custom_domain.user
if user.disabled:
LOG.i("Disabled user %s can't create new alias via custom domain", user)
return None
if not user.can_create_new_alias():
LOG.d(f"{user} can't create new custom-domain alias {address}")
if notify_user:
send_cannot_create_domain_alias(custom_domain.user, address, alias_domain)
return None
if not custom_domain.catch_all:
if len(custom_domain.auto_create_rules) == 0:
return None
local = get_email_local_part(address)
for rule in custom_domain.auto_create_rules:
if regex_match(rule.regex, local):
LOG.d(
"%s passes %s on %s",
address,
rule.regex,
custom_domain,
)
return custom_domain, rule
else: # no rule passes
LOG.d("no rule passed to create %s", local)
return None
LOG.d("Create alias via catchall")
return custom_domain, None
def check_if_alias_can_be_auto_created_for_a_directory(
address: str, notify_user: bool = True
) -> Optional[Directory]:
"""
Try to create an alias with directory
If an alias would be created, return the directory that would trigger the creation. Otherwise, return None.
"""
# check if alias belongs to a directory, ie having directory/anything@EMAIL_DOMAIN format
if not can_create_directory_for_address(address):
return None
# alias contains one of the 3 special directory separator: "/", "+" or "#"
if "/" in address:
sep = "/"
elif "+" in address:
sep = "+"
elif "#" in address:
sep = "#"
else:
# if there's no directory separator in the alias, no way to auto-create it
return None
directory_name = address[: address.find(sep)]
LOG.d("directory_name %s", directory_name)
directory = Directory.get_by(name=directory_name)
if not directory:
return None
user: User = directory.user
if user.disabled:
LOG.i("Disabled %s can't create new alias with directory", user)
return None
if not user.can_create_new_alias():
LOG.d(f"{user} can't create new directory alias {address}")
if notify_user:
send_cannot_create_directory_alias(user, address, directory_name)
return None
if directory.disabled:
if notify_user:
send_cannot_create_directory_alias_disabled(user, address, directory_name)
return None
return directory
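# Illustrative sketch: how the directory name is derived from an address,
# mirroring the "/", "+" and "#" separator handling above. The example
# addresses are made up.
def _example_directory_name_parsing():
    for address in ("acme/news@example.com", "acme+shop@example.com", "acme#forum@example.com"):
        for sep in ("/", "+", "#"):
            if sep in address:
                print(address, "-> directory", address[: address.find(sep)])
                break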
def try_auto_create(address: str) -> Optional[Alias]:
"""Try to auto-create the alias using directory or catch-all domain"""
# VERP for reply phase is {BOUNCE_PREFIX_FOR_REPLY_PHASE}+{email_log.id}+@{alias_domain}
if address.startswith(f"{BOUNCE_PREFIX_FOR_REPLY_PHASE}+") and "+@" in address:
LOG.e("alias %s can't start with %s", address, BOUNCE_PREFIX_FOR_REPLY_PHASE)
return None
# VERP for forward phase is BOUNCE_PREFIX + email_log.id + BOUNCE_SUFFIX
if address.startswith(BOUNCE_PREFIX) and address.endswith(BOUNCE_SUFFIX):
LOG.e("alias %s can't start with %s", address, BOUNCE_PREFIX)
return None
try:
# NOT allow unicode for now
validate_email(address, check_deliverability=False, allow_smtputf8=False)
except EmailNotValidError:
return None
alias = try_auto_create_via_domain(address)
if not alias:
alias = try_auto_create_directory(address)
return alias
def try_auto_create_directory(address: str) -> Optional[Alias]:
"""
Try to create an alias with directory
"""
directory = check_if_alias_can_be_auto_created_for_a_directory(
address, notify_user=True
)
if not directory:
return None
try:
LOG.d("create alias %s for directory %s", address, directory)
mailboxes = directory.mailboxes
alias = Alias.create(
email=address,
user_id=directory.user_id,
directory_id=directory.id,
mailbox_id=mailboxes[0].id,
)
if not directory.user.disable_automatic_alias_note:
alias.note = f"Created by directory {directory.name}"
Session.flush()
for i in range(1, len(mailboxes)):
AliasMailbox.create(
alias_id=alias.id,
mailbox_id=mailboxes[i].id,
)
Session.commit()
return alias
except AliasInTrashError:
LOG.w(
"Alias %s was deleted before, cannot auto-create using directory %s, user %s",
address,
directory.name,
directory.user,
)
return None
except IntegrityError:
LOG.w("Alias %s already exists", address)
Session.rollback()
alias = Alias.get_by(email=address)
return alias
def try_auto_create_via_domain(address: str) -> Optional[Alias]:
"""Try to create an alias with catch-all or auto-create rules on custom domain"""
can_create = check_if_alias_can_be_auto_created_for_custom_domain(address)
if not can_create:
return None
custom_domain, rule = can_create
if rule:
alias_note = f"Created by rule {rule.order} with regex {rule.regex}"
mailboxes = rule.mailboxes
else:
alias_note = "Created by catchall option"
mailboxes = custom_domain.mailboxes
    # a rule can have 0 mailboxes. This can happen when a mailbox is deleted
if not mailboxes:
LOG.d(
"use %s default mailbox for %s %s",
custom_domain.user,
address,
custom_domain,
)
mailboxes = [custom_domain.user.default_mailbox]
try:
LOG.d("create alias %s for domain %s", address, custom_domain)
alias = Alias.create(
email=address,
user_id=custom_domain.user_id,
custom_domain_id=custom_domain.id,
automatic_creation=True,
mailbox_id=mailboxes[0].id,
)
if not custom_domain.user.disable_automatic_alias_note:
alias.note = alias_note
Session.flush()
for i in range(1, len(mailboxes)):
AliasMailbox.create(
alias_id=alias.id,
mailbox_id=mailboxes[i].id,
)
Session.commit()
return alias
except AliasInTrashError:
LOG.w(
"Alias %s was deleted before, cannot auto-create using domain catch-all %s, user %s",
address,
custom_domain,
custom_domain.user,
)
return None
except IntegrityError:
LOG.w("Alias %s already exists", address)
Session.rollback()
alias = Alias.get_by(email=address)
return alias
except DataError:
LOG.w("Cannot create alias %s", address)
Session.rollback()
return None
def delete_alias(alias: Alias, user: User):
"""
Delete an alias and add it to either global or domain trash
Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
"""
# save deleted alias to either global or domain trash
if alias.custom_domain_id:
if not DomainDeletedAlias.get_by(
email=alias.email, domain_id=alias.custom_domain_id
):
LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
Session.add(
DomainDeletedAlias(
user_id=user.id,
email=alias.email,
domain_id=alias.custom_domain_id,
)
)
Session.commit()
else:
if not DeletedAlias.get_by(email=alias.email):
LOG.d("add %s to global trash", alias)
Session.add(DeletedAlias(email=alias.email))
Session.commit()
LOG.i("delete alias %s", alias)
Alias.filter(Alias.id == alias.id).delete()
Session.commit()
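# Illustrative sketch: which trash table a deleted alias is recorded in,
# following the branching above. The helper name is hypothetical.
def _example_trash_destination(custom_domain_id) -> str:
    # aliases on a custom domain go to the domain trash, all others to the global trash
    return "DomainDeletedAlias" if custom_domain_id else "DeletedAlias"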
def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
"""
get list of aliases for a given mailbox
"""
ret = set(Alias.filter(Alias.mailbox_id == mailbox.id).all())
for alias in (
Session.query(Alias)
.join(AliasMailbox, Alias.id == AliasMailbox.alias_id)
.filter(AliasMailbox.mailbox_id == mailbox.id)
):
ret.add(alias)
return list(ret)
def nb_email_log_for_mailbox(mailbox: Mailbox):
aliases = aliases_for_mailbox(mailbox)
alias_ids = [alias.id for alias in aliases]
return (
Session.query(EmailLog)
.join(Contact, EmailLog.contact_id == Contact.id)
.filter(Contact.alias_id.in_(alias_ids))
.count()
)
# Only lowercase letters, numbers, dots (.), dashes (-) and underscores (_) are currently supported
_ALIAS_PREFIX_PATTERN = r"[0-9a-z-_.]{1,}"
def check_alias_prefix(alias_prefix) -> bool:
if len(alias_prefix) > 40:
return False
if re.fullmatch(_ALIAS_PREFIX_PATTERN, alias_prefix) is None:
return False
return True
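# Illustrative usage sketch: what check_alias_prefix accepts, i.e. at most 40
# characters drawn from lowercase letters, digits, dots, dashes and underscores.
# The example prefixes are made up.
def _example_check_alias_prefix():
    assert check_alias_prefix("news.letter_2022") is True
    assert check_alias_prefix("Invalid Prefix!") is False
    assert check_alias_prefix("a" * 41) is False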
def alias_export_csv(user, csv_direct_export=False):
"""
Get user aliases as importable CSV file
Output:
Importable CSV file
"""
data = [["alias", "note", "enabled", "mailboxes"]]
for alias in Alias.filter_by(user_id=user.id).all(): # type: Alias
# Always put the main mailbox first
        # It is seen as the primary mailbox when importing
alias_mailboxes = alias.mailboxes
alias_mailboxes.insert(
0, alias_mailboxes.pop(alias_mailboxes.index(alias.mailbox))
)
mailboxes = " ".join([mailbox.email for mailbox in alias_mailboxes])
data.append([alias.email, alias.note, alias.enabled, mailboxes])
si = StringIO()
cw = csv.writer(si)
cw.writerows(data)
if csv_direct_export:
return si.getvalue()
output = make_response(si.getvalue())
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
output.headers["Content-type"] = "text/csv"
return output
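# Illustrative sketch: the CSV layout produced by alias_export_csv, shown with
# fabricated rows. The header and the space-separated mailbox column mirror the
# code above.
def _example_alias_csv_layout() -> str:
    import csv
    from io import StringIO

    rows = [
        ["alias", "note", "enabled", "mailboxes"],
        ["hello@example.com", "demo note", True, "main@example.com other@example.com"],
    ]
    buf = StringIO()
    csv.writer(buf).writerows(rows)
    return buf.getvalue()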

18
app/app/api/__init__.py Normal file
View File

@ -0,0 +1,18 @@
from .views import (
alias_options,
new_custom_alias,
custom_domain,
new_random_alias,
user_info,
auth,
auth_mfa,
alias,
apple,
mailbox,
notification,
setting,
export,
phone,
sudo,
user,
)

67
app/app/api/base.py Normal file
View File

@ -0,0 +1,67 @@
from functools import wraps
from typing import Tuple, Optional
import arrow
from flask import Blueprint, request, jsonify, g
from flask_login import current_user
from app.db import Session
from app.models import ApiKey
api_bp = Blueprint(name="api", import_name=__name__, url_prefix="/api")
SUDO_MODE_MINUTES_VALID = 5
def authorize_request() -> Optional[Tuple[str, int]]:
api_code = request.headers.get("Authentication")
api_key = ApiKey.get_by(code=api_code)
if not api_key:
if current_user.is_authenticated:
g.user = current_user
else:
return jsonify(error="Wrong api key"), 401
else:
# Update api key stats
api_key.last_used = arrow.now()
api_key.times += 1
Session.commit()
g.user = api_key.user
if g.user.disabled:
return jsonify(error="Disabled account"), 403
g.api_key = api_key
return None
def check_sudo_mode_is_active(api_key: ApiKey) -> bool:
    return api_key.sudo_mode_at and api_key.sudo_mode_at >= arrow.now().shift(
minutes=-SUDO_MODE_MINUTES_VALID
)
def require_api_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
error_return = authorize_request()
if error_return:
return error_return
return f(*args, **kwargs)
return decorated
def require_api_sudo(f):
@wraps(f)
def decorated(*args, **kwargs):
error_return = authorize_request()
if error_return:
return error_return
if not check_sudo_mode_is_active(g.api_key):
return jsonify(error="Need sudo"), 440
return f(*args, **kwargs)
return decorated
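# Illustrative client-side sketch: requests authenticated by the decorators
# above carry the API key in the "Authentication" header; a missing or invalid
# key yields 401, a disabled account 403 and, for @require_api_sudo endpoints,
# a stale sudo session 440. The base URL and key below are placeholders.
def _example_authenticated_request(path: str):
    import requests

    return requests.get(
        "https://app.simplelogin.io" + path,  # hypothetical base URL
        headers={"Authentication": "<api-key>"},
    )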

407
app/app/api/serializer.py Normal file
View File

@ -0,0 +1,407 @@
from dataclasses import dataclass
from typing import Optional
from arrow import Arrow
from sqlalchemy import or_, func, case, and_
from sqlalchemy.orm import joinedload
from app.config import PAGE_LIMIT
from app.db import Session
from app.models import (
Alias,
Contact,
EmailLog,
Mailbox,
AliasMailbox,
CustomDomain,
User,
)
@dataclass
class AliasInfo:
alias: Alias
mailbox: Mailbox
mailboxes: [Mailbox]
nb_forward: int
nb_blocked: int
nb_reply: int
latest_email_log: EmailLog = None
latest_contact: Contact = None
custom_domain: Optional[CustomDomain] = None
def contain_mailbox(self, mailbox_id: int) -> bool:
return mailbox_id in [m.id for m in self.mailboxes]
def serialize_alias_info(alias_info: AliasInfo) -> dict:
return {
# Alias field
"id": alias_info.alias.id,
"email": alias_info.alias.email,
"creation_date": alias_info.alias.created_at.format(),
"creation_timestamp": alias_info.alias.created_at.timestamp,
"enabled": alias_info.alias.enabled,
"note": alias_info.alias.note,
# activity
"nb_forward": alias_info.nb_forward,
"nb_block": alias_info.nb_blocked,
"nb_reply": alias_info.nb_reply,
}
def serialize_alias_info_v2(alias_info: AliasInfo) -> dict:
res = {
# Alias field
"id": alias_info.alias.id,
"email": alias_info.alias.email,
"creation_date": alias_info.alias.created_at.format(),
"creation_timestamp": alias_info.alias.created_at.timestamp,
"enabled": alias_info.alias.enabled,
"note": alias_info.alias.note,
"name": alias_info.alias.name,
# activity
"nb_forward": alias_info.nb_forward,
"nb_block": alias_info.nb_blocked,
"nb_reply": alias_info.nb_reply,
# mailbox
"mailbox": {"id": alias_info.mailbox.id, "email": alias_info.mailbox.email},
"mailboxes": [
{"id": mailbox.id, "email": mailbox.email}
for mailbox in alias_info.mailboxes
],
"support_pgp": alias_info.alias.mailbox_support_pgp(),
"disable_pgp": alias_info.alias.disable_pgp,
"latest_activity": None,
"pinned": alias_info.alias.pinned,
}
if alias_info.latest_email_log:
email_log = alias_info.latest_email_log
contact = alias_info.latest_contact
# latest activity
res["latest_activity"] = {
"timestamp": email_log.created_at.timestamp,
"action": email_log.get_action(),
"contact": {
"email": contact.website_email,
"name": contact.name,
"reverse_alias": contact.website_send_to(),
},
}
return res
def serialize_contact(contact: Contact, existed=False) -> dict:
res = {
"id": contact.id,
"creation_date": contact.created_at.format(),
"creation_timestamp": contact.created_at.timestamp,
"last_email_sent_date": None,
"last_email_sent_timestamp": None,
"contact": contact.website_email,
"reverse_alias": contact.website_send_to(),
"reverse_alias_address": contact.reply_email,
"existed": existed,
"block_forward": contact.block_forward,
}
email_log: EmailLog = contact.last_reply()
if email_log:
res["last_email_sent_date"] = email_log.created_at.format()
res["last_email_sent_timestamp"] = email_log.created_at.timestamp
return res
def get_alias_infos_with_pagination(user, page_id=0, query=None) -> [AliasInfo]:
ret = []
q = (
Session.query(Alias)
.options(joinedload(Alias.mailbox))
.filter(Alias.user_id == user.id)
.order_by(Alias.created_at.desc())
)
if query:
q = q.filter(
or_(Alias.email.ilike(f"%{query}%"), Alias.note.ilike(f"%{query}%"))
)
q = q.limit(PAGE_LIMIT).offset(page_id * PAGE_LIMIT)
for alias in q:
ret.append(get_alias_info(alias))
return ret
def get_alias_infos_with_pagination_v3(
user,
page_id=0,
query=None,
sort=None,
alias_filter=None,
mailbox_id=None,
directory_id=None,
page_limit=PAGE_LIMIT,
page_size=PAGE_LIMIT,
) -> [AliasInfo]:
q = construct_alias_query(user)
if query:
q = q.filter(
or_(
Alias.email.ilike(f"%{query}%"),
Alias.note.ilike(f"%{query}%"),
                # can't use match() here as it uses to_tsquery which expects a tsquery input
# Alias.ts_vector.match(query),
Alias.ts_vector.op("@@")(func.plainto_tsquery("english", query)),
Alias.name.ilike(f"%{query}%"),
)
)
if mailbox_id:
q = q.join(
AliasMailbox, Alias.id == AliasMailbox.alias_id, isouter=True
).filter(
or_(Alias.mailbox_id == mailbox_id, AliasMailbox.mailbox_id == mailbox_id)
)
if directory_id:
q = q.filter(Alias.directory_id == directory_id)
if alias_filter == "enabled":
q = q.filter(Alias.enabled)
elif alias_filter == "disabled":
q = q.filter(Alias.enabled.is_(False))
elif alias_filter == "pinned":
q = q.filter(Alias.pinned)
elif alias_filter == "hibp":
q = q.filter(Alias.hibp_breaches.any())
if sort == "old2new":
q = q.order_by(Alias.created_at)
elif sort == "new2old":
q = q.order_by(Alias.created_at.desc())
elif sort == "a2z":
q = q.order_by(Alias.email)
elif sort == "z2a":
q = q.order_by(Alias.email.desc())
else:
# default sorting
latest_activity = case(
[
(Alias.created_at > EmailLog.created_at, Alias.created_at),
(Alias.created_at < EmailLog.created_at, EmailLog.created_at),
],
else_=Alias.created_at,
)
q = q.order_by(Alias.pinned.desc())
q = q.order_by(latest_activity.desc())
q = list(q.limit(page_limit).offset(page_id * page_size))
ret = []
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
ret.append(
AliasInfo(
alias=alias,
mailbox=alias.mailbox,
mailboxes=alias.mailboxes,
nb_forward=nb_forward,
nb_blocked=nb_blocked,
nb_reply=nb_reply,
latest_email_log=email_log,
latest_contact=contact,
custom_domain=alias.custom_domain,
)
)
return ret
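# Illustrative usage sketch: fetching the second page of pinned aliases, newest
# first, with the v3 helper above. `user` is assumed to be a loaded User object
# inside an application context.
def _example_pinned_aliases_page(user):
    return get_alias_infos_with_pagination_v3(
        user,
        page_id=1,
        alias_filter="pinned",
        sort="new2old",
    )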
def get_alias_info(alias: Alias) -> AliasInfo:
q = (
Session.query(Contact, EmailLog)
.filter(Contact.alias_id == alias.id)
.filter(EmailLog.contact_id == Contact.id)
)
alias_info = AliasInfo(
alias=alias,
nb_blocked=0,
nb_forward=0,
nb_reply=0,
mailbox=alias.mailbox,
mailboxes=[alias.mailbox],
)
for _, el in q:
if el.is_reply:
alias_info.nb_reply += 1
elif el.blocked:
alias_info.nb_blocked += 1
else:
alias_info.nb_forward += 1
return alias_info
def get_alias_info_v2(alias: Alias, mailbox=None) -> AliasInfo:
if not mailbox:
mailbox = alias.mailbox
q = (
Session.query(Contact, EmailLog)
.filter(Contact.alias_id == alias.id)
.filter(EmailLog.contact_id == Contact.id)
)
latest_activity: Arrow = alias.created_at
latest_email_log = None
latest_contact = None
alias_info = AliasInfo(
alias=alias,
nb_blocked=0,
nb_forward=0,
nb_reply=0,
mailbox=mailbox,
mailboxes=[mailbox],
)
for m in alias._mailboxes:
alias_info.mailboxes.append(m)
    # remove duplicates
    # it can happen that alias.mailbox_id also appears in the AliasMailbox table
alias_info.mailboxes = list(set(alias_info.mailboxes))
for contact, email_log in q:
if email_log.is_reply:
alias_info.nb_reply += 1
elif email_log.blocked:
alias_info.nb_blocked += 1
else:
alias_info.nb_forward += 1
if email_log.created_at > latest_activity:
latest_activity = email_log.created_at
latest_email_log = email_log
latest_contact = contact
alias_info.latest_contact = latest_contact
alias_info.latest_email_log = latest_email_log
return alias_info
def get_alias_contacts(alias, page_id: int) -> [dict]:
q = (
Contact.filter_by(alias_id=alias.id)
.order_by(Contact.id.desc())
.limit(PAGE_LIMIT)
.offset(page_id * PAGE_LIMIT)
)
res = []
for fe in q.all():
res.append(serialize_contact(fe))
return res
def get_alias_info_v3(user: User, alias_id: int) -> AliasInfo:
# use the same query construction in get_alias_infos_with_pagination_v3
q = construct_alias_query(user)
q = q.filter(Alias.id == alias_id)
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
return AliasInfo(
alias=alias,
mailbox=alias.mailbox,
mailboxes=alias.mailboxes,
nb_forward=nb_forward,
nb_blocked=nb_blocked,
nb_reply=nb_reply,
latest_email_log=email_log,
latest_contact=contact,
custom_domain=alias.custom_domain,
)
def construct_alias_query(user: User):
# subquery on alias annotated with nb_reply, nb_blocked, nb_forward, max_created_at, latest_email_log_created_at
alias_activity_subquery = (
Session.query(
Alias.id,
func.sum(case([(EmailLog.is_reply, 1)], else_=0)).label("nb_reply"),
func.sum(
case(
[(and_(EmailLog.is_reply.is_(False), EmailLog.blocked), 1)],
else_=0,
)
).label("nb_blocked"),
func.sum(
case(
[
(
and_(
EmailLog.is_reply.is_(False),
EmailLog.blocked.is_(False),
),
1,
)
],
else_=0,
)
).label("nb_forward"),
func.max(EmailLog.created_at).label("latest_email_log_created_at"),
)
.join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
.filter(Alias.user_id == user.id)
.group_by(Alias.id)
.subquery()
)
alias_contact_subquery = (
Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
.filter(Alias.user_id == user.id)
.group_by(Alias.id)
.subquery()
)
return (
Session.query(
Alias,
Contact,
EmailLog,
alias_activity_subquery.c.nb_reply,
alias_activity_subquery.c.nb_blocked,
alias_activity_subquery.c.nb_forward,
)
.options(joinedload(Alias.hibp_breaches))
.options(joinedload(Alias.custom_domain))
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
.join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
.filter(Alias.id == alias_activity_subquery.c.id)
.filter(Alias.id == alias_contact_subquery.c.id)
.filter(
or_(
EmailLog.created_at
== alias_activity_subquery.c.latest_email_log_created_at,
and_(
# no email log yet for this alias
alias_activity_subquery.c.latest_email_log_created_at.is_(None),
# to make sure only 1 contact is returned in this case
or_(
Contact.id == alias_contact_subquery.c.max_contact_id,
alias_contact_subquery.c.max_contact_id.is_(None),
),
),
)
)
)

474
app/app/api/views/alias.py Normal file
View File

@ -0,0 +1,474 @@
from deprecated import deprecated
from flask import g
from flask import jsonify
from flask import request
from app import alias_utils
from app.api.base import api_bp, require_api_auth
from app.api.serializer import (
AliasInfo,
serialize_alias_info,
serialize_contact,
get_alias_infos_with_pagination,
get_alias_contacts,
serialize_alias_info_v2,
get_alias_info_v2,
get_alias_infos_with_pagination_v3,
)
from app.dashboard.views.alias_contact_manager import create_contact
from app.dashboard.views.alias_log import get_alias_log
from app.db import Session
from app.errors import (
CannotCreateContactForReverseAlias,
ErrContactErrorUpgradeNeeded,
ErrContactAlreadyExists,
ErrAddressInvalid,
)
from app.models import Alias, Contact, Mailbox, AliasMailbox
@deprecated
@api_bp.route("/aliases", methods=["GET", "POST"])
@require_api_auth
def get_aliases():
"""
Get aliases
Input:
page_id: in query
Output:
- aliases: list of alias:
- id
- email
- creation_date
- creation_timestamp
- nb_forward
- nb_block
- nb_reply
- note
"""
user = g.user
try:
page_id = int(request.args.get("page_id"))
except (ValueError, TypeError):
return jsonify(error="page_id must be provided in request query"), 400
query = None
data = request.get_json(silent=True)
if data:
query = data.get("query")
alias_infos: [AliasInfo] = get_alias_infos_with_pagination(
user, page_id=page_id, query=query
)
return (
jsonify(
aliases=[serialize_alias_info(alias_info) for alias_info in alias_infos]
),
200,
)
@api_bp.route("/v2/aliases", methods=["GET", "POST"])
@require_api_auth
def get_aliases_v2():
"""
Get aliases
Input:
page_id: in query
pinned: in query
disabled: in query
enabled: in query
Output:
- aliases: list of alias:
- id
- email
- creation_date
- creation_timestamp
- nb_forward
- nb_block
- nb_reply
- note
- mailbox
- mailboxes
- support_pgp
- disable_pgp
- latest_activity: null if no activity.
- timestamp
- action: forward|reply|block|bounced
- contact:
- email
- name
- reverse_alias
"""
user = g.user
try:
page_id = int(request.args.get("page_id"))
except (ValueError, TypeError):
return jsonify(error="page_id must be provided in request query"), 400
pinned = "pinned" in request.args
disabled = "disabled" in request.args
enabled = "enabled" in request.args
if pinned:
alias_filter = "pinned"
elif disabled:
alias_filter = "disabled"
elif enabled:
alias_filter = "enabled"
else:
alias_filter = None
query = None
data = request.get_json(silent=True)
if data:
query = data.get("query")
alias_infos: [AliasInfo] = get_alias_infos_with_pagination_v3(
user, page_id=page_id, query=query, alias_filter=alias_filter
)
return (
jsonify(
aliases=[serialize_alias_info_v2(alias_info) for alias_info in alias_infos]
),
200,
)
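# Illustrative client-side sketch: listing page 0 of pinned aliases through the
# endpoint above. Base URL and API key are placeholders; the "pinned" flag only
# needs to be present in the query string.
def _example_get_aliases_v2_client():
    import requests

    resp = requests.get(
        "https://app.simplelogin.io/api/v2/aliases",  # hypothetical base URL
        headers={"Authentication": "<api-key>"},
        params={"page_id": 0, "pinned": ""},
    )
    return resp.json().get("aliases", [])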
@api_bp.route("/aliases/<int:alias_id>", methods=["DELETE"])
@require_api_auth
def delete_alias(alias_id):
"""
Delete alias
Input:
alias_id: in url
Output:
200 if deleted successfully
"""
user = g.user
alias = Alias.get(alias_id)
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_utils.delete_alias(alias, user)
return jsonify(deleted=True), 200
@api_bp.route("/aliases/<int:alias_id>/toggle", methods=["POST"])
@require_api_auth
def toggle_alias(alias_id):
"""
Enable/disable alias
Input:
alias_id: in url
Output:
200 along with new status:
- enabled
"""
user = g.user
alias: Alias = Alias.get(alias_id)
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias.enabled = not alias.enabled
Session.commit()
return jsonify(enabled=alias.enabled), 200
@api_bp.route("/aliases/<int:alias_id>/activities")
@require_api_auth
def get_alias_activities(alias_id):
"""
    Get alias activities
Input:
page_id: in query
Output:
- activities: list of activity:
- from
- to
- timestamp
- action: forward|reply|block|bounced
- reverse_alias
"""
user = g.user
try:
page_id = int(request.args.get("page_id"))
except (ValueError, TypeError):
return jsonify(error="page_id must be provided in request query"), 400
alias: Alias = Alias.get(alias_id)
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_logs = get_alias_log(alias, page_id)
activities = []
for alias_log in alias_logs:
activity = {
"timestamp": alias_log.when.timestamp,
"reverse_alias": alias_log.reverse_alias,
"reverse_alias_address": alias_log.contact.reply_email,
}
if alias_log.is_reply:
activity["from"] = alias_log.alias
activity["to"] = alias_log.website_email
activity["action"] = "reply"
else:
activity["to"] = alias_log.alias
activity["from"] = alias_log.website_email
if alias_log.bounced:
activity["action"] = "bounced"
elif alias_log.blocked:
activity["action"] = "block"
else:
activity["action"] = "forward"
activities.append(activity)
return jsonify(activities=activities), 200
@api_bp.route("/aliases/<int:alias_id>", methods=["PUT", "PATCH"])
@require_api_auth
def update_alias(alias_id):
"""
Update alias note
Input:
alias_id: in url
note (optional): in body
name (optional): in body
mailbox_id (optional): in body
disable_pgp (optional): in body
Output:
200
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
user = g.user
alias: Alias = Alias.get(alias_id)
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
changed = False
if "note" in data:
new_note = data.get("note")
alias.note = new_note
changed = True
if "mailbox_id" in data:
mailbox_id = int(data.get("mailbox_id"))
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
return jsonify(error="Forbidden"), 400
alias.mailbox_id = mailbox_id
changed = True
if "mailbox_ids" in data:
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
mailboxes: [Mailbox] = []
# check if all mailboxes belong to user
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
return jsonify(error="Forbidden"), 400
mailboxes.append(mailbox)
if not mailboxes:
return jsonify(error="Must choose at least one mailbox"), 400
# <<< update alias mailboxes >>>
# first remove all existing alias-mailboxes links
AliasMailbox.filter_by(alias_id=alias.id).delete()
Session.flush()
# then add all new mailboxes
for i, mailbox in enumerate(mailboxes):
if i == 0:
alias.mailbox_id = mailboxes[0].id
else:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
# <<< END update alias mailboxes >>>
changed = True
if "name" in data:
# to make sure alias name doesn't contain linebreak
new_name = data.get("name")
if new_name and len(new_name) > 128:
return jsonify(error="Name can't be longer than 128 characters"), 400
if new_name:
new_name = new_name.replace("\n", "")
alias.name = new_name
changed = True
if "disable_pgp" in data:
alias.disable_pgp = data.get("disable_pgp")
changed = True
if "pinned" in data:
alias.pinned = data.get("pinned")
changed = True
if changed:
Session.commit()
return jsonify(ok=True), 200
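# Illustrative client-side sketch: updating an alias note, pin state and
# mailboxes through the endpoint above. Base URL, API key and the ids are
# placeholders.
def _example_update_alias_client(alias_id: int):
    import requests

    return requests.patch(
        f"https://app.simplelogin.io/api/aliases/{alias_id}",  # hypothetical base URL
        headers={"Authentication": "<api-key>"},
        json={"note": "shopping alias", "mailbox_ids": [1, 2], "pinned": True},
    )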
@api_bp.route("/aliases/<int:alias_id>", methods=["GET"])
@require_api_auth
def get_alias(alias_id):
"""
Get alias
Input:
alias_id: in url
Output:
Alias info, same as in get_aliases
"""
user = g.user
alias: Alias = Alias.get(alias_id)
if not alias:
return jsonify(error="Unknown error"), 400
if alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
return jsonify(**serialize_alias_info_v2(get_alias_info_v2(alias))), 200
@api_bp.route("/aliases/<int:alias_id>/contacts")
@require_api_auth
def get_alias_contacts_route(alias_id):
"""
Get alias contacts
Input:
page_id: in query
Output:
- contacts: list of contacts:
- creation_date
- creation_timestamp
- last_email_sent_date
- last_email_sent_timestamp
- contact
- reverse_alias
"""
user = g.user
try:
page_id = int(request.args.get("page_id"))
except (ValueError, TypeError):
return jsonify(error="page_id must be provided in request query"), 400
alias: Alias = Alias.get(alias_id)
if not alias:
return jsonify(error="No such alias"), 404
if alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
contacts = get_alias_contacts(alias, page_id)
return jsonify(contacts=contacts), 200
@api_bp.route("/aliases/<int:alias_id>/contacts", methods=["POST"])
@require_api_auth
def create_contact_route(alias_id):
"""
Create contact for an alias
Input:
alias_id: in url
contact: in body
Output:
201 if success
        200 with existed=true if the contact already exists
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
    alias: Alias = Alias.get(alias_id)
    if not alias or alias.user_id != g.user.id:
        return jsonify(error="Forbidden"), 403
contact_address = data.get("contact")
try:
contact = create_contact(g.user, alias, contact_address)
except ErrContactErrorUpgradeNeeded as err:
return jsonify(error=err.error_for_user()), 403
except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
return jsonify(error=err.error_for_user()), 400
except ErrContactAlreadyExists as err:
return jsonify(**serialize_contact(err.contact, existed=True)), 200
return jsonify(**serialize_contact(contact)), 201
@api_bp.route("/contacts/<int:contact_id>", methods=["DELETE"])
@require_api_auth
def delete_contact(contact_id):
"""
Delete contact
Input:
contact_id: in url
Output:
200
"""
user = g.user
contact = Contact.get(contact_id)
if not contact or contact.alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
Contact.delete(contact_id)
Session.commit()
return jsonify(deleted=True), 200
@api_bp.route("/contacts/<int:contact_id>/toggle", methods=["POST"])
@require_api_auth
def toggle_contact(contact_id):
"""
Block/Unblock contact
Input:
contact_id: in url
Output:
200
"""
user = g.user
contact = Contact.get(contact_id)
if not contact or contact.alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
contact.block_forward = not contact.block_forward
Session.commit()
return jsonify(block_forward=contact.block_forward), 200

View File

@ -0,0 +1,153 @@
import tldextract
from flask import jsonify, request, g
from sqlalchemy import desc
from app.alias_suffix import get_alias_suffixes
from app.api.base import api_bp, require_api_auth
from app.db import Session
from app.log import LOG
from app.models import AliasUsedOn, Alias, User
from app.utils import convert_to_id
@api_bp.route("/v4/alias/options")
@require_api_auth
def options_v4():
"""
    Return what options the user has when creating a new alias.
    Same as v3 but returns a time-based signed suffix in addition to the suffix. To be used with /v2/alias/custom/new
Input:
a valid api-key in "Authentication" header and
optional "hostname" in args
Output: cf README
can_create: bool
suffixes: [[suffix, signed_suffix]]
prefix_suggestion: str
recommendation: Optional dict
alias: str
hostname: str
"""
user = g.user
hostname = request.args.get("hostname")
ret = {
"can_create": user.can_create_new_alias(),
"suffixes": [],
"prefix_suggestion": "",
}
    # alias recommendation, if one exists for this hostname
if hostname:
# put the latest used alias first
q = (
Session.query(AliasUsedOn, Alias, User)
.filter(
AliasUsedOn.alias_id == Alias.id,
Alias.user_id == user.id,
AliasUsedOn.hostname == hostname,
)
.order_by(desc(AliasUsedOn.created_at))
)
r = q.first()
if r:
_, alias, _ = r
LOG.d("found alias %s %s %s", alias, hostname, user)
ret["recommendation"] = {"alias": alias.email, "hostname": hostname}
# custom alias suggestion and suffix
if hostname:
# keep only the domain name of hostname, ignore TLD and subdomain
# for ex www.groupon.com -> groupon
ext = tldextract.extract(hostname)
prefix_suggestion = ext.domain
prefix_suggestion = convert_to_id(prefix_suggestion)
ret["prefix_suggestion"] = prefix_suggestion
suffixes = get_alias_suffixes(user)
# custom domain should be put first
ret["suffixes"] = list([suffix.suffix, suffix.signed_suffix] for suffix in suffixes)
return jsonify(ret)
@api_bp.route("/v5/alias/options")
@require_api_auth
def options_v5():
"""
    Return what options the user has when creating a new alias.
Same as v4 but uses a better format. To be used with /v2/alias/custom/new
Input:
a valid api-key in "Authentication" header and
optional "hostname" in args
Output: cf README
can_create: bool
suffixes: [
{
suffix: "suffix",
signed_suffix: "signed_suffix",
is_custom: true,
is_premium: false
}
]
prefix_suggestion: str
recommendation: Optional dict
alias: str
hostname: str
"""
user = g.user
hostname = request.args.get("hostname")
ret = {
"can_create": user.can_create_new_alias(),
"suffixes": [],
"prefix_suggestion": "",
}
    # alias recommendation, if one exists for this hostname
if hostname:
# put the latest used alias first
q = (
Session.query(AliasUsedOn, Alias, User)
.filter(
AliasUsedOn.alias_id == Alias.id,
Alias.user_id == user.id,
AliasUsedOn.hostname == hostname,
)
.order_by(desc(AliasUsedOn.created_at))
)
r = q.first()
if r:
_, alias, _ = r
LOG.d("found alias %s %s %s", alias, hostname, user)
ret["recommendation"] = {"alias": alias.email, "hostname": hostname}
# custom alias suggestion and suffix
if hostname:
# keep only the domain name of hostname, ignore TLD and subdomain
# for ex www.groupon.com -> groupon
ext = tldextract.extract(hostname)
prefix_suggestion = ext.domain
prefix_suggestion = convert_to_id(prefix_suggestion)
ret["prefix_suggestion"] = prefix_suggestion
suffixes = get_alias_suffixes(user)
# custom domain should be put first
ret["suffixes"] = [
{
"suffix": suffix.suffix,
"signed_suffix": suffix.signed_suffix,
"is_custom": suffix.is_custom,
"is_premium": suffix.is_premium,
}
for suffix in suffixes
]
return jsonify(ret)
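# Illustrative client-side sketch: querying the v5 options endpoint above for a
# hostname. Base URL and API key are placeholders; the response carries
# can_create, prefix_suggestion and the structured suffix list.
def _example_alias_options_v5_client():
    import requests

    resp = requests.get(
        "https://app.simplelogin.io/api/v5/alias/options",  # hypothetical base URL
        headers={"Authentication": "<api-key>"},
        params={"hostname": "www.groupon.com"},
    )
    return resp.json()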

559
app/app/api/views/apple.py Normal file
View File

@ -0,0 +1,559 @@
from typing import Optional
import arrow
import requests
from flask import g
from flask import jsonify
from flask import request
from requests import RequestException
from app.api.base import api_bp, require_api_auth
from app.config import APPLE_API_SECRET, MACAPP_APPLE_API_SECRET
from app.db import Session
from app.log import LOG
from app.models import PlanEnum, AppleSubscription
_MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
_YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"
_MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
_MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"
# Apple API URL
_SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
_PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
@api_bp.route("/apple/process_payment", methods=["POST"])
@require_api_auth
def apple_process_payment():
"""
Process payment
Input:
receipt_data: in body
(optional) is_macapp: in body
Output:
        200 if the payment is successful, i.e. user is upgraded to premium
"""
user = g.user
LOG.d("request for /apple/process_payment from %s", user)
data = request.get_json()
receipt_data = data.get("receipt_data")
is_macapp = "is_macapp" in data and data["is_macapp"] is True
if is_macapp:
LOG.d("Use Macapp secret")
password = MACAPP_APPLE_API_SECRET
else:
password = APPLE_API_SECRET
apple_sub = verify_receipt(receipt_data, user, password)
if apple_sub:
return jsonify(ok=True), 200
return jsonify(error="Processing failed"), 400
@api_bp.route("/apple/update_notification", methods=["GET", "POST"])
def apple_update_notification():
"""
The "Subscription Status URL" to receive update notifications from Apple
"""
# request.json looks like this
# will use unified_receipt.latest_receipt_info and NOT latest_expired_receipt_info
# more info on https://developer.apple.com/documentation/appstoreservernotifications/responsebody
# {
# "unified_receipt": {
# "latest_receipt": "long string",
# "pending_renewal_info": [
# {
# "is_in_billing_retry_period": "0",
# "auto_renew_status": "0",
# "original_transaction_id": "1000000654277043",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "expiration_intent": "1",
# "auto_renew_product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# }
# ],
# "environment": "Sandbox",
# "status": 0,
# "latest_receipt_info": [
# {
# "expires_date_pst": "2020-04-20 21:11:57 America/Los_Angeles",
# "purchase_date": "2020-04-21 03:11:57 Etc/GMT",
# "purchase_date_ms": "1587438717000",
# "original_purchase_date_ms": "1587420715000",
# "transaction_id": "1000000654329911",
# "original_transaction_id": "1000000654277043",
# "quantity": "1",
# "expires_date_ms": "1587442317000",
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "subscription_group_identifier": "20624274",
# "web_order_line_item_id": "1000000051891577",
# "expires_date": "2020-04-21 04:11:57 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# "purchase_date_pst": "2020-04-20 20:11:57 America/Los_Angeles",
# "is_trial_period": "false",
# },
# {
# "expires_date_pst": "2020-04-20 20:11:57 America/Los_Angeles",
# "purchase_date": "2020-04-21 02:11:57 Etc/GMT",
# "purchase_date_ms": "1587435117000",
# "original_purchase_date_ms": "1587420715000",
# "transaction_id": "1000000654313889",
# "original_transaction_id": "1000000654277043",
# "quantity": "1",
# "expires_date_ms": "1587438717000",
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "subscription_group_identifier": "20624274",
# "web_order_line_item_id": "1000000051890729",
# "expires_date": "2020-04-21 03:11:57 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# "purchase_date_pst": "2020-04-20 19:11:57 America/Los_Angeles",
# "is_trial_period": "false",
# },
# {
# "expires_date_pst": "2020-04-20 19:11:54 America/Los_Angeles",
# "purchase_date": "2020-04-21 01:11:54 Etc/GMT",
# "purchase_date_ms": "1587431514000",
# "original_purchase_date_ms": "1587420715000",
# "transaction_id": "1000000654300800",
# "original_transaction_id": "1000000654277043",
# "quantity": "1",
# "expires_date_ms": "1587435114000",
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "subscription_group_identifier": "20624274",
# "web_order_line_item_id": "1000000051890161",
# "expires_date": "2020-04-21 02:11:54 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# "purchase_date_pst": "2020-04-20 18:11:54 America/Los_Angeles",
# "is_trial_period": "false",
# },
# {
# "expires_date_pst": "2020-04-20 18:11:54 America/Los_Angeles",
# "purchase_date": "2020-04-21 00:11:54 Etc/GMT",
# "purchase_date_ms": "1587427914000",
# "original_purchase_date_ms": "1587420715000",
# "transaction_id": "1000000654293615",
# "original_transaction_id": "1000000654277043",
# "quantity": "1",
# "expires_date_ms": "1587431514000",
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "subscription_group_identifier": "20624274",
# "web_order_line_item_id": "1000000051889539",
# "expires_date": "2020-04-21 01:11:54 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# "purchase_date_pst": "2020-04-20 17:11:54 America/Los_Angeles",
# "is_trial_period": "false",
# },
# {
# "expires_date_pst": "2020-04-20 17:11:54 America/Los_Angeles",
# "purchase_date": "2020-04-20 23:11:54 Etc/GMT",
# "purchase_date_ms": "1587424314000",
# "original_purchase_date_ms": "1587420715000",
# "transaction_id": "1000000654285464",
# "original_transaction_id": "1000000654277043",
# "quantity": "1",
# "expires_date_ms": "1587427914000",
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "subscription_group_identifier": "20624274",
# "web_order_line_item_id": "1000000051888827",
# "expires_date": "2020-04-21 00:11:54 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# "purchase_date_pst": "2020-04-20 16:11:54 America/Los_Angeles",
# "is_trial_period": "false",
# },
# {
# "expires_date_pst": "2020-04-20 16:11:54 America/Los_Angeles",
# "purchase_date": "2020-04-20 22:11:54 Etc/GMT",
# "purchase_date_ms": "1587420714000",
# "original_purchase_date_ms": "1587420715000",
# "transaction_id": "1000000654277043",
# "original_transaction_id": "1000000654277043",
# "quantity": "1",
# "expires_date_ms": "1587424314000",
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "subscription_group_identifier": "20624274",
# "web_order_line_item_id": "1000000051888825",
# "expires_date": "2020-04-20 23:11:54 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# "purchase_date_pst": "2020-04-20 15:11:54 America/Los_Angeles",
# "is_trial_period": "false",
# },
# ],
# },
# "auto_renew_status_change_date": "2020-04-21 04:11:33 Etc/GMT",
# "environment": "Sandbox",
# "auto_renew_status": "false",
# "auto_renew_status_change_date_pst": "2020-04-20 21:11:33 America/Los_Angeles",
# "latest_expired_receipt": "long string",
# "latest_expired_receipt_info": {
# "original_purchase_date_pst": "2020-04-20 15:11:55 America/Los_Angeles",
# "quantity": "1",
# "subscription_group_identifier": "20624274",
# "unique_vendor_identifier": "4C4DF6BA-DE2A-4737-9A68-5992338886DC",
# "original_purchase_date_ms": "1587420715000",
# "expires_date_formatted": "2020-04-21 04:11:57 Etc/GMT",
# "is_in_intro_offer_period": "false",
# "purchase_date_ms": "1587438717000",
# "expires_date_formatted_pst": "2020-04-20 21:11:57 America/Los_Angeles",
# "is_trial_period": "false",
# "item_id": "1508744966",
# "unique_identifier": "b55fc3dcc688e979115af0697a0195be78be7cbd",
# "original_transaction_id": "1000000654277043",
# "expires_date": "1587442317000",
# "transaction_id": "1000000654329911",
# "bvrs": "3",
# "web_order_line_item_id": "1000000051891577",
# "version_external_identifier": "834289833",
# "bid": "io.simplelogin.ios-app",
# "product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "purchase_date": "2020-04-21 03:11:57 Etc/GMT",
# "purchase_date_pst": "2020-04-20 20:11:57 America/Los_Angeles",
# "original_purchase_date": "2020-04-20 22:11:55 Etc/GMT",
# },
# "password": "22b9d5a110dd4344a1681631f1f95f55",
# "auto_renew_status_change_date_ms": "1587442293000",
# "auto_renew_product_id": "io.simplelogin.ios_app.subscription.premium.yearly",
# "notification_type": "DID_CHANGE_RENEWAL_STATUS",
# }
LOG.d("request for /api/apple/update_notification")
data = request.get_json()
if not (
data
and data.get("unified_receipt")
and data["unified_receipt"].get("latest_receipt_info")
):
LOG.d("Invalid data %s", data)
return jsonify(error="Empty Response"), 400
transactions = data["unified_receipt"]["latest_receipt_info"]
# dict of original_transaction_id and transaction
latest_transactions = {}
for transaction in transactions:
original_transaction_id = transaction["original_transaction_id"]
if not latest_transactions.get(original_transaction_id):
latest_transactions[original_transaction_id] = transaction
if (
transaction["expires_date_ms"]
> latest_transactions[original_transaction_id]["expires_date_ms"]
):
latest_transactions[original_transaction_id] = transaction
for original_transaction_id, transaction in latest_transactions.items():
expires_date = arrow.get(int(transaction["expires_date_ms"]) / 1000)
plan = (
PlanEnum.monthly
if transaction["product_id"]
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
else PlanEnum.yearly
)
apple_sub: AppleSubscription = AppleSubscription.get_by(
original_transaction_id=original_transaction_id
)
if apple_sub:
user = apple_sub.user
LOG.d(
"Update AppleSubscription for user %s, expired at %s, plan %s",
user,
expires_date,
plan,
)
apple_sub.receipt_data = data["unified_receipt"]["latest_receipt"]
apple_sub.expires_date = expires_date
apple_sub.plan = plan
apple_sub.product_id = transaction["product_id"]
Session.commit()
return jsonify(ok=True), 200
else:
LOG.w(
"No existing AppleSub for original_transaction_id %s",
original_transaction_id,
)
LOG.d("request data %s", data)
return jsonify(error="Processing failed"), 400
def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
"""
Call https://buy.itunes.apple.com/verifyReceipt and create/update AppleSubscription table
Call the production URL for verifyReceipt first,
    and fall back to the sandbox URL if a 21007 status code is received.
Return AppleSubscription object if success
https://developer.apple.com/documentation/appstorereceipts/verifyreceipt
"""
LOG.d("start verify_receipt")
try:
r = requests.post(
_PROD_URL, json={"receipt-data": receipt_data, "password": password}
)
except RequestException:
LOG.w("cannot call Apple server %s", _PROD_URL)
return None
if r.status_code >= 500:
LOG.w("Apple server error, response:%s %s", r, r.content)
return None
if r.json() == {"status": 21007}:
# try sandbox_url
LOG.w("Use the sandbox url instead")
r = requests.post(
_SANDBOX_URL,
json={"receipt-data": receipt_data, "password": password},
)
data = r.json()
# data has the following format
# {
# "status": 0,
# "environment": "Sandbox",
# "receipt": {
# "receipt_type": "ProductionSandbox",
# "adam_id": 0,
# "app_item_id": 0,
# "bundle_id": "io.simplelogin.ios-app",
# "application_version": "2",
# "download_id": 0,
# "version_external_identifier": 0,
# "receipt_creation_date": "2020-04-18 16:36:34 Etc/GMT",
# "receipt_creation_date_ms": "1587227794000",
# "receipt_creation_date_pst": "2020-04-18 09:36:34 America/Los_Angeles",
# "request_date": "2020-04-18 16:46:36 Etc/GMT",
# "request_date_ms": "1587228396496",
# "request_date_pst": "2020-04-18 09:46:36 America/Los_Angeles",
# "original_purchase_date": "2013-08-01 07:00:00 Etc/GMT",
# "original_purchase_date_ms": "1375340400000",
# "original_purchase_date_pst": "2013-08-01 00:00:00 America/Los_Angeles",
# "original_application_version": "1.0",
# "in_app": [
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653584474",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:27:42 Etc/GMT",
# "purchase_date_ms": "1587227262000",
# "purchase_date_pst": "2020-04-18 09:27:42 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:32:42 Etc/GMT",
# "expires_date_ms": "1587227562000",
# "expires_date_pst": "2020-04-18 09:32:42 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847459",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# },
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653584861",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:32:42 Etc/GMT",
# "purchase_date_ms": "1587227562000",
# "purchase_date_pst": "2020-04-18 09:32:42 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:37:42 Etc/GMT",
# "expires_date_ms": "1587227862000",
# "expires_date_pst": "2020-04-18 09:37:42 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847461",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# },
# ],
# },
# "latest_receipt_info": [
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653584474",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:27:42 Etc/GMT",
# "purchase_date_ms": "1587227262000",
# "purchase_date_pst": "2020-04-18 09:27:42 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:32:42 Etc/GMT",
# "expires_date_ms": "1587227562000",
# "expires_date_pst": "2020-04-18 09:32:42 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847459",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# "subscription_group_identifier": "20624274",
# },
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653584861",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:32:42 Etc/GMT",
# "purchase_date_ms": "1587227562000",
# "purchase_date_pst": "2020-04-18 09:32:42 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:37:42 Etc/GMT",
# "expires_date_ms": "1587227862000",
# "expires_date_pst": "2020-04-18 09:37:42 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847461",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# "subscription_group_identifier": "20624274",
# },
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653585235",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:38:16 Etc/GMT",
# "purchase_date_ms": "1587227896000",
# "purchase_date_pst": "2020-04-18 09:38:16 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:43:16 Etc/GMT",
# "expires_date_ms": "1587228196000",
# "expires_date_pst": "2020-04-18 09:43:16 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847500",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# "subscription_group_identifier": "20624274",
# },
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653585760",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:44:25 Etc/GMT",
# "purchase_date_ms": "1587228265000",
# "purchase_date_pst": "2020-04-18 09:44:25 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:49:25 Etc/GMT",
# "expires_date_ms": "1587228565000",
# "expires_date_pst": "2020-04-18 09:49:25 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847566",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# "subscription_group_identifier": "20624274",
# },
# ],
# "latest_receipt": "very long string",
# "pending_renewal_info": [
# {
# "auto_renew_product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "original_transaction_id": "1000000653584474",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "auto_renew_status": "1",
# }
# ],
# }
if data["status"] != 0:
LOG.e(
"verifyReceipt status !=0, probably invalid receipt. User %s, data %s",
user,
data,
)
return None
# use responseBody.Latest_receipt_info and not responseBody.Receipt.In_app
# as recommended on https://developer.apple.com/documentation/appstorereceipts/responsebody/receipt/in_app
# each item in data["latest_receipt_info"] has the following format
# {
# "quantity": "1",
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
# "transaction_id": "1000000653584474",
# "original_transaction_id": "1000000653584474",
# "purchase_date": "2020-04-18 16:27:42 Etc/GMT",
# "purchase_date_ms": "1587227262000",
# "purchase_date_pst": "2020-04-18 09:27:42 America/Los_Angeles",
# "original_purchase_date": "2020-04-18 16:27:44 Etc/GMT",
# "original_purchase_date_ms": "1587227264000",
# "original_purchase_date_pst": "2020-04-18 09:27:44 America/Los_Angeles",
# "expires_date": "2020-04-18 16:32:42 Etc/GMT",
# "expires_date_ms": "1587227562000",
# "expires_date_pst": "2020-04-18 09:32:42 America/Los_Angeles",
# "web_order_line_item_id": "1000000051847459",
# "is_trial_period": "false",
# "is_in_intro_offer_period": "false",
# }
transactions = data.get("latest_receipt_info")
if not transactions:
LOG.i("Empty transactions in data %s", data)
return None
latest_transaction = max(transactions, key=lambda t: int(t["expires_date_ms"]))
original_transaction_id = latest_transaction["original_transaction_id"]
expires_date = arrow.get(int(latest_transaction["expires_date_ms"]) / 1000)
plan = (
PlanEnum.monthly
if latest_transaction["product_id"]
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
else PlanEnum.yearly
)
apple_sub: AppleSubscription = AppleSubscription.get_by(user_id=user.id)
if apple_sub:
LOG.d(
"Update AppleSubscription for user %s, expired at %s (%s), plan %s",
user,
expires_date,
expires_date.humanize(),
plan,
)
apple_sub.receipt_data = receipt_data
apple_sub.expires_date = expires_date
apple_sub.original_transaction_id = original_transaction_id
apple_sub.product_id = latest_transaction["product_id"]
apple_sub.plan = plan
else:
# the same original_transaction_id has been used on another account
if AppleSubscription.get_by(original_transaction_id=original_transaction_id):
LOG.e("Same Apple Sub has been used before, current user %s", user)
return None
LOG.d(
"Create new AppleSubscription for user %s, expired at %s, plan %s",
user,
expires_date,
plan,
)
apple_sub = AppleSubscription.create(
user_id=user.id,
receipt_data=receipt_data,
expires_date=expires_date,
original_transaction_id=original_transaction_id,
plan=plan,
product_id=latest_transaction["product_id"],
)
Session.commit()
return apple_sub
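# Illustrative sketch: the production-then-sandbox verifyReceipt retry used
# above, reduced to its essentials. _PROD_URL and _SANDBOX_URL are the constants
# defined in this file; the receipt data and shared secret are placeholders
# supplied by the caller.
def _example_verify_receipt_flow(receipt_data: str, password: str) -> dict:
    import requests

    payload = {"receipt-data": receipt_data, "password": password}
    data = requests.post(_PROD_URL, json=payload).json()
    if data.get("status") == 21007:
        # the receipt comes from the sandbox environment: retry against the sandbox URL
        data = requests.post(_SANDBOX_URL, json=payload).json()
    return data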

383
app/app/api/views/auth.py Normal file
View File

@ -0,0 +1,383 @@
import secrets
import string
import facebook
import google.oauth2.credentials
import googleapiclient.discovery
from flask import jsonify, request
from flask_login import login_user
from itsdangerous import Signer
from app import email_utils
from app.api.base import api_bp
from app.config import FLASK_SECRET, DISABLE_REGISTRATION
from app.dashboard.views.setting import send_reset_password_email
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
send_email,
render,
)
from app.events.auth_event import LoginEvent, RegisterEvent
from app.extensions import limiter
from app.log import LOG
from app.models import User, ApiKey, SocialAuth, AccountActivation
from app.utils import sanitize_email, canonicalize_email
@api_bp.route("/auth/login", methods=["POST"])
@limiter.limit("10/minute")
def auth_login():
"""
Authenticate user
Input:
email
password
device: to create an ApiKey associated with this device
Output:
200 and user info containing:
{
name: "John Wick",
mfa_enabled: true,
mfa_key: "a long string",
api_key: "a long string"
}
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
password = data.get("password")
device = data.get("device")
email = sanitize_email(data.get("email"))
canonical_email = canonicalize_email(data.get("email"))
user = User.get_by(email=email) or User.get_by(email=canonical_email)
if not user or not user.check_password(password):
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
return jsonify(error="Email or password incorrect"), 400
elif user.disabled:
LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
return jsonify(error="Account disabled"), 400
elif not user.activated:
LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
return jsonify(error="Account not activated"), 422
elif user.fido_enabled():
# allow user who has TOTP enabled to continue using the mobile app
if not user.enable_otp:
return jsonify(error="Currently we don't support FIDO on mobile yet"), 403
LoginEvent(LoginEvent.ActionType.success, LoginEvent.Source.api).send()
return jsonify(**auth_payload(user, device)), 200
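# Illustrative client-side sketch: logging in through the endpoint above. Base
# URL and credentials are placeholders; the response carries name, mfa_enabled,
# mfa_key and api_key as described in the docstring.
def _example_auth_login_client():
    import requests

    return requests.post(
        "https://app.simplelogin.io/api/auth/login",  # hypothetical base URL
        json={
            "email": "jane@example.com",
            "password": "a-strong-password",
            "device": "My phone",
        },
    ).json()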
@api_bp.route("/auth/register", methods=["POST"])
@limiter.limit("10/minute")
def auth_register():
"""
User signs up - will need to activate their account with an activation code.
Input:
email
password
Output:
200: user needs to confirm their account
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
dirty_email = data.get("email")
email = canonicalize_email(dirty_email)
password = data.get("password")
if DISABLE_REGISTRATION:
RegisterEvent(RegisterEvent.ActionType.failed, RegisterEvent.Source.api).send()
return jsonify(error="registration is closed"), 400
if not email_can_be_used_as_mailbox(email) or personal_email_already_used(email):
RegisterEvent(
RegisterEvent.ActionType.invalid_email, RegisterEvent.Source.api
).send()
return jsonify(error=f"cannot use {email} as personal inbox"), 400
if not password or len(password) < 8:
RegisterEvent(RegisterEvent.ActionType.failed, RegisterEvent.Source.api).send()
return jsonify(error="password too short"), 400
if len(password) > 100:
RegisterEvent(RegisterEvent.ActionType.failed, RegisterEvent.Source.api).send()
return jsonify(error="password too long"), 400
LOG.d("create user %s", email)
user = User.create(email=email, name=dirty_email, password=password)
Session.flush()
# create activation code
code = "".join([str(secrets.choice(string.digits)) for _ in range(6)])
AccountActivation.create(user_id=user.id, code=code)
Session.commit()
send_email(
email,
"Just one more step to join SimpleLogin",
render("transactional/code-activation.txt.jinja2", code=code),
render("transactional/code-activation.html", code=code),
)
RegisterEvent(RegisterEvent.ActionType.success, RegisterEvent.Source.api).send()
return jsonify(msg="User needs to confirm their account"), 200
@api_bp.route("/auth/activate", methods=["POST"])
@limiter.limit("10/minute")
def auth_activate():
"""
User enters the activation code to confirm their account.
Input:
email
code
Output:
200: user account is now activated, user can login now
        400: wrong email or code
410: wrong code too many times
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
email = sanitize_email(data.get("email"))
canonical_email = canonicalize_email(data.get("email"))
code = data.get("code")
user = User.get_by(email=email) or User.get_by(email=canonical_email)
# do not use a different message to avoid exposing existing email
if not user or user.activated:
return jsonify(error="Wrong email or code"), 400
account_activation = AccountActivation.get_by(user_id=user.id)
if not account_activation:
return jsonify(error="Wrong email or code"), 400
if account_activation.code != code:
# decrement nb tries
account_activation.tries -= 1
Session.commit()
if account_activation.tries == 0:
AccountActivation.delete(account_activation.id)
Session.commit()
return jsonify(error="Too many wrong tries"), 410
return jsonify(error="Wrong email or code"), 400
LOG.d("activate user %s", user)
user.activated = True
AccountActivation.delete(account_activation.id)
Session.commit()
return jsonify(msg="Account is activated, user can login now"), 200
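# Illustrative client-side sketch: submitting the 6-digit activation code
# through the endpoint above. Base URL, email and code are placeholders.
def _example_auth_activate_client():
    import requests

    return requests.post(
        "https://app.simplelogin.io/api/auth/activate",  # hypothetical base URL
        json={"email": "jane@example.com", "code": "123456"},
    )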
@api_bp.route("/auth/reactivate", methods=["POST"])
@limiter.limit("10/minute")
def auth_reactivate():
"""
User asks for another activation code
Input:
email
Output:
        200: user is going to receive an email to activate their account
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
email = sanitize_email(data.get("email"))
canonical_email = canonicalize_email(data.get("email"))
user = User.get_by(email=email) or User.get_by(email=canonical_email)
# do not use a different message to avoid exposing existing email
if not user or user.activated:
return jsonify(error="Something went wrong"), 400
account_activation = AccountActivation.get_by(user_id=user.id)
if account_activation:
AccountActivation.delete(account_activation.id)
Session.commit()
# create activation code
code = "".join([str(secrets.choice(string.digits)) for _ in range(6)])
AccountActivation.create(user_id=user.id, code=code)
Session.commit()
send_email(
email,
"Just one more step to join SimpleLogin",
render("transactional/code-activation.txt.jinja2", code=code),
render("transactional/code-activation.html", code=code),
)
return jsonify(msg="User needs to confirm their account"), 200
@api_bp.route("/auth/facebook", methods=["POST"])
@limiter.limit("10/minute")
def auth_facebook():
"""
Authenticate user with Facebook
Input:
facebook_token: facebook access token
device: to create an ApiKey associated with this device
Output:
200 and user info containing:
{
name: "John Wick",
mfa_enabled: true,
mfa_key: "a long string",
api_key: "a long string"
}
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
facebook_token = data.get("facebook_token")
device = data.get("device")
graph = facebook.GraphAPI(access_token=facebook_token)
user_info = graph.get_object("me", fields="email,name")
email = sanitize_email(user_info.get("email"))
user = User.get_by(email=email)
if not user:
if DISABLE_REGISTRATION:
return jsonify(error="registration is closed"), 400
if not email_can_be_used_as_mailbox(email) or personal_email_already_used(
email
):
return jsonify(error=f"cannot use {email} as personal inbox"), 400
LOG.d("create facebook user with %s", user_info)
user = User.create(email=email, name=user_info["name"], activated=True)
Session.commit()
email_utils.send_welcome_email(user)
if not SocialAuth.get_by(user_id=user.id, social="facebook"):
SocialAuth.create(user_id=user.id, social="facebook")
Session.commit()
return jsonify(**auth_payload(user, device)), 200
@api_bp.route("/auth/google", methods=["POST"])
@limiter.limit("10/minute")
def auth_google():
"""
Authenticate user with Google
Input:
google_token: Google access token
device: to create an ApiKey associated with this device
Output:
200 and user info containing:
{
name: "John Wick",
mfa_enabled: true,
mfa_key: "a long string",
api_key: "a long string"
}
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
google_token = data.get("google_token")
device = data.get("device")
cred = google.oauth2.credentials.Credentials(token=google_token)
build = googleapiclient.discovery.build("oauth2", "v2", credentials=cred)
user_info = build.userinfo().get().execute()
email = sanitize_email(user_info.get("email"))
user = User.get_by(email=email)
if not user:
if DISABLE_REGISTRATION:
return jsonify(error="registration is closed"), 400
if not email_can_be_used_as_mailbox(email) or personal_email_already_used(
email
):
return jsonify(error=f"cannot use {email} as personal inbox"), 400
LOG.d("create Google user with %s", user_info)
user = User.create(email=email, name="", activated=True)
Session.commit()
email_utils.send_welcome_email(user)
if not SocialAuth.get_by(user_id=user.id, social="google"):
SocialAuth.create(user_id=user.id, social="google")
Session.commit()
return jsonify(**auth_payload(user, device)), 200
def auth_payload(user, device) -> dict:
ret = {"name": user.name or "", "email": user.email, "mfa_enabled": user.enable_otp}
# do not give api_key, user can only obtain api_key after OTP verification
if user.enable_otp:
s = Signer(FLASK_SECRET)
ret["mfa_key"] = s.sign(str(user.id))
ret["api_key"] = None
else:
api_key = ApiKey.get_by(user_id=user.id, name=device)
if not api_key:
LOG.d("create new api key for %s and %s", user, device)
api_key = ApiKey.create(user.id, device)
Session.commit()
ret["mfa_key"] = None
ret["api_key"] = api_key.code
# so user is automatically logged in on the web
login_user(user)
return ret
@api_bp.route("/auth/forgot_password", methods=["POST"])
@limiter.limit("10/minute")
def forgot_password():
"""
User forgot password
Input:
email
Output:
200 and a reset password email is sent to user
400 if email not exist
"""
data = request.get_json()
if not data or not data.get("email"):
return jsonify(error="request body must contain email"), 400
email = sanitize_email(data.get("email"))
canonical_email = canonicalize_email(data.get("email"))
user = User.get_by(email=email) or User.get_by(email=canonical_email)
if user:
send_reset_password_email(user)
return jsonify(ok=True)
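
A minimal client-side sketch (not part of this commit) of the register-and-activate flow the docstrings above describe, using the requests library. The base URL and all values are placeholders; the /api prefix is assumed from the docstrings (e.g. "/api/auth/login").
import requests

BASE_URL = "https://app.simplelogin.io"  # placeholder instance URL

# sign up: a 6-digit activation code is emailed to the user
resp = requests.post(
    f"{BASE_URL}/api/auth/register",
    json={"email": "jane@example.com", "password": "a-long-enough-password"},
)
print(resp.status_code, resp.json())  # 200, "User needs to confirm their account"

# the user enters the code they received
resp = requests.post(
    f"{BASE_URL}/api/auth/activate",
    json={"email": "jane@example.com", "code": "123456"},
)
print(resp.status_code)  # 200 once activated, 410 after too many wrong tries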

View File

@ -0,0 +1,75 @@
import pyotp
from flask import jsonify, request
from flask_login import login_user
from itsdangerous import Signer
from app.api.base import api_bp
from app.config import FLASK_SECRET
from app.db import Session
from app.email_utils import send_invalid_totp_login_email
from app.extensions import limiter
from app.log import LOG
from app.models import User, ApiKey
@api_bp.route("/auth/mfa", methods=["POST"])
@limiter.limit("10/minute")
def auth_mfa():
"""
Validate the OTP Token
Input:
mfa_token: OTP token that user enters
mfa_key: MFA key obtained in previous auth request, e.g. /api/auth/login
device: the device name, used to create an ApiKey associated with this device
Output:
200 and user info containing:
{
name: "John Wick",
api_key: "a long string",
email: "user email"
}
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
mfa_token = data.get("mfa_token")
mfa_key = data.get("mfa_key")
device = data.get("device")
s = Signer(FLASK_SECRET)
try:
user_id = int(s.unsign(mfa_key))
except Exception:
return jsonify(error="Invalid mfa_key"), 400
user = User.get(user_id)
if not user:
return jsonify(error="Invalid mfa_key"), 400
elif not user.enable_otp:
return (
jsonify(error="This endpoint should only be used by user who enables MFA"),
400,
)
totp = pyotp.TOTP(user.otp_secret)
if not totp.verify(mfa_token, valid_window=2):
send_invalid_totp_login_email(user, "TOTP")
return jsonify(error="Wrong TOTP Token"), 400
ret = {"name": user.name or "", "email": user.email}
api_key = ApiKey.get_by(user_id=user.id, name=device)
if not api_key:
LOG.d("create new api key for %s and %s", user, device)
api_key = ApiKey.create(user.id, device)
Session.commit()
ret["api_key"] = api_key.code
# so user is logged in automatically on the web
login_user(user)
return jsonify(**ret), 200
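
A hedged sketch (not part of this commit) of how a client could use this endpoint: after /api/auth/login returns mfa_enabled=true and an mfa_key, the client sends the current TOTP code together with that key to obtain an api_key. All values are placeholders.
import requests

BASE_URL = "https://app.simplelogin.io"  # placeholder

resp = requests.post(
    f"{BASE_URL}/api/auth/mfa",
    json={
        "mfa_token": "123456",        # current TOTP code from the authenticator app
        "mfa_key": "signed-user-id",  # value returned by the login endpoint
        "device": "my-laptop",        # names the ApiKey created for this device
    },
)
if resp.status_code == 200:
    api_key = resp.json()["api_key"]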

View File

@ -0,0 +1,126 @@
from flask import g, request
from flask import jsonify
from app.api.base import api_bp, require_api_auth
from app.db import Session
from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
def custom_domain_to_dict(custom_domain: CustomDomain):
return {
"id": custom_domain.id,
"domain_name": custom_domain.domain,
"is_verified": custom_domain.verified,
"nb_alias": custom_domain.nb_alias(),
"creation_date": custom_domain.created_at.format(),
"creation_timestamp": custom_domain.created_at.timestamp,
"catch_all": custom_domain.catch_all,
"name": custom_domain.name,
"random_prefix_generation": custom_domain.random_prefix_generation,
"mailboxes": [
{"id": mb.id, "email": mb.email} for mb in custom_domain.mailboxes
],
}
@api_bp.route("/custom_domains", methods=["GET"])
@require_api_auth
def get_custom_domains():
user = g.user
custom_domains = CustomDomain.filter_by(
user_id=user.id, is_sl_subdomain=False
).all()
return jsonify(custom_domains=[custom_domain_to_dict(cd) for cd in custom_domains])
@api_bp.route("/custom_domains/<int:custom_domain_id>/trash", methods=["GET"])
@require_api_auth
def get_custom_domain_trash(custom_domain_id: int):
user = g.user
custom_domain = CustomDomain.get(custom_domain_id)
if not custom_domain or custom_domain.user_id != user.id:
return jsonify(error="Forbidden"), 403
domain_deleted_aliases = DomainDeletedAlias.filter_by(
domain_id=custom_domain.id
).all()
return jsonify(
aliases=[
{
"alias": dda.email,
"deletion_timestamp": dda.created_at.timestamp,
}
for dda in domain_deleted_aliases
]
)
@api_bp.route("/custom_domains/<int:custom_domain_id>", methods=["PATCH"])
@require_api_auth
def update_custom_domain(custom_domain_id):
"""
Update custom domain settings
Input:
custom_domain_id: in url
In body:
catch_all (optional): boolean
random_prefix_generation (optional): boolean
name (optional): in body
mailbox_ids (optional): array of mailbox_id
Output:
200
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
user = g.user
custom_domain: CustomDomain = CustomDomain.get(custom_domain_id)
if not custom_domain or custom_domain.user_id != user.id:
return jsonify(error="Forbidden"), 403
changed = False
if "catch_all" in data:
catch_all = data.get("catch_all")
custom_domain.catch_all = catch_all
changed = True
if "random_prefix_generation" in data:
random_prefix_generation = data.get("random_prefix_generation")
custom_domain.random_prefix_generation = random_prefix_generation
changed = True
if "name" in data:
name = data.get("name")
custom_domain.name = name
changed = True
if "mailbox_ids" in data:
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
if mailbox_ids:
# check that the mailboxes are not tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
return jsonify(error="Forbidden"), 400
mailboxes.append(mailbox)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
changed = True
if changed:
Session.commit()
# refresh
custom_domain = CustomDomain.get(custom_domain_id)
return jsonify(custom_domain=custom_domain_to_dict(custom_domain)), 200
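
An illustrative call to the PATCH endpoint above (not part of this commit). It assumes require_api_auth, defined elsewhere in this commit, reads the API key from an "Authentication" header; the domain id, key and mailbox ids are placeholders.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

resp = requests.patch(
    f"{BASE_URL}/api/custom_domains/42",      # 42 is a placeholder custom_domain_id
    headers=HEADERS,
    json={"catch_all": True, "random_prefix_generation": False, "mailbox_ids": [1, 2]},
)
print(resp.status_code, resp.json().get("custom_domain"))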

View File

@ -0,0 +1,49 @@
from flask import g
from flask import jsonify
from app.api.base import api_bp, require_api_auth
from app.models import Alias, Client, CustomDomain
from app.alias_utils import alias_export_csv
@api_bp.route("/export/data", methods=["GET"])
@require_api_auth
def export_data():
"""
Get user data
Output:
Alias, custom domain and app info
"""
user = g.user
data = {
"email": user.email,
"name": user.name,
"aliases": [],
"apps": [],
"custom_domains": [],
}
for alias in Alias.filter_by(user_id=user.id).all(): # type: Alias
data["aliases"].append(dict(email=alias.email, enabled=alias.enabled))
for custom_domain in CustomDomain.filter_by(user_id=user.id).all():
data["custom_domains"].append(custom_domain.domain)
for app in Client.filter_by(user_id=user.id): # type: Client
data["apps"].append(dict(name=app.name, home_url=app.home_url))
return jsonify(data)
@api_bp.route("/export/aliases", methods=["GET"])
@require_api_auth
def export_aliases():
"""
Get user aliases as importable CSV file
Output:
Importable CSV file
"""
return alias_export_csv(g.user)
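
A short usage sketch (not part of this commit) for the two export endpoints, with the same assumed "Authentication" header and placeholder values.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

# JSON dump of aliases, custom domains and apps
data = requests.get(f"{BASE_URL}/api/export/data", headers=HEADERS).json()
print(data["email"], len(data["aliases"]), "aliases")

# importable CSV of aliases
resp = requests.get(f"{BASE_URL}/api/export/aliases", headers=HEADERS)
with open("aliases.csv", "wb") as f:
    f.write(resp.content)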

View File

@ -0,0 +1,208 @@
from smtplib import SMTPRecipientsRefused
import arrow
from flask import g
from flask import jsonify
from flask import request
from app.api.base import api_bp, require_api_auth
from app.config import JOB_DELETE_MAILBOX
from app.dashboard.views.mailbox import send_verification_email
from app.dashboard.views.mailbox_detail import verify_mailbox_change
from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
is_valid_email,
)
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email
def mailbox_to_dict(mailbox: Mailbox):
return {
"id": mailbox.id,
"email": mailbox.email,
"verified": mailbox.verified,
"default": mailbox.user.default_mailbox_id == mailbox.id,
"creation_timestamp": mailbox.created_at.timestamp,
"nb_alias": mailbox.nb_alias(),
}
@api_bp.route("/mailboxes", methods=["POST"])
@require_api_auth
def create_mailbox():
"""
Create a new mailbox. User needs to verify the mailbox via an activation email.
Input:
email: in body
Output:
the new mailbox dict
"""
user = g.user
mailbox_email = sanitize_email(request.get_json().get("email"))
if not user.is_premium():
return jsonify(error=f"Only premium plan can add additional mailbox"), 400
if not is_valid_email(mailbox_email):
return jsonify(error=f"{mailbox_email} invalid"), 400
elif mailbox_already_used(mailbox_email, user):
return jsonify(error=f"{mailbox_email} already used"), 400
elif not email_can_be_used_as_mailbox(mailbox_email):
return (
jsonify(
error=f"{mailbox_email} cannot be used. Please note a mailbox cannot "
f"be a disposable email address"
),
400,
)
else:
new_mailbox = Mailbox.create(email=mailbox_email, user_id=user.id)
Session.commit()
send_verification_email(user, new_mailbox)
return (
jsonify(mailbox_to_dict(new_mailbox)),
201,
)
@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["DELETE"])
@require_api_auth
def delete_mailbox(mailbox_id):
"""
Delete mailbox
Input:
mailbox_id: in url
Output:
200 if deleted successfully
"""
user = g.user
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
return jsonify(error="Forbidden"), 403
if mailbox.id == user.default_mailbox_id:
return jsonify(error="You cannot delete the default mailbox"), 400
# Schedule delete mailbox job
LOG.w("schedule delete mailbox job for %s", mailbox)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={"mailbox_id": mailbox.id},
run_at=arrow.now(),
commit=True,
)
return jsonify(deleted=True), 200
@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["PUT"])
@require_api_auth
def update_mailbox(mailbox_id):
"""
Update mailbox
Input:
mailbox_id: in url
(optional) default: in body. Set a mailbox as the default mailbox.
(optional) email: in body. Change a mailbox email.
(optional) cancel_email_change: in body. Cancel mailbox email change.
Output:
200 if updated successfully
"""
user = g.user
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
return jsonify(error="Forbidden"), 403
data = request.get_json() or {}
changed = False
if "default" in data:
is_default = data.get("default")
if is_default:
if not mailbox.verified:
return (
jsonify(
error="Unverified mailbox cannot be used as default mailbox"
),
400,
)
user.default_mailbox_id = mailbox.id
changed = True
if "email" in data:
new_email = sanitize_email(data.get("email"))
if mailbox_already_used(new_email, user):
return jsonify(error=f"{new_email} already used"), 400
elif not email_can_be_used_as_mailbox(new_email):
return (
jsonify(
error=f"{new_email} cannot be used. Please note a mailbox cannot "
f"be a disposable email address"
),
400,
)
try:
verify_mailbox_change(user, mailbox, new_email)
except SMTPRecipientsRefused:
return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
else:
mailbox.new_email = new_email
changed = True
if "cancel_email_change" in data:
cancel_email_change = data.get("cancel_email_change")
if cancel_email_change:
mailbox.new_email = None
changed = True
if changed:
Session.commit()
return jsonify(updated=True), 200
@api_bp.route("/mailboxes", methods=["GET"])
@require_api_auth
def get_mailboxes():
"""
Get verified mailboxes
Output:
- mailboxes: list of mailbox dict
"""
user = g.user
return (
jsonify(mailboxes=[mailbox_to_dict(mb) for mb in user.mailboxes()]),
200,
)
@api_bp.route("/v2/mailboxes", methods=["GET"])
@require_api_auth
def get_mailboxes_v2():
"""
Get all mailboxes - including unverified mailboxes
Output:
- mailboxes: list of mailbox dict
"""
user = g.user
mailboxes = []
for mailbox in Mailbox.filter_by(user_id=user.id):
mailboxes.append(mailbox)
return (
jsonify(mailboxes=[mailbox_to_dict(mb) for mb in mailboxes]),
200,
)
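
A sketch (not part of this commit) of a typical mailbox round trip with these endpoints: create a mailbox, list all mailboxes, then promote a verified one to default. Header name and values are assumptions/placeholders.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

new_mb = requests.post(
    f"{BASE_URL}/api/mailboxes", headers=HEADERS, json={"email": "me@example.com"}
).json()  # 201 with the mailbox dict; a verification email is sent

all_mbs = requests.get(f"{BASE_URL}/api/v2/mailboxes", headers=HEADERS).json()["mailboxes"]
print([mb["email"] for mb in all_mbs])

# once the mailbox is verified it can become the default one
requests.put(
    f"{BASE_URL}/api/mailboxes/{new_mb['id']}", headers=HEADERS, json={"default": True}
)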

View File

@ -0,0 +1,235 @@
from flask import g
from flask import jsonify, request
from app import parallel_limiter
from app.alias_suffix import check_suffix_signature, verify_prefix_suffix
from app.alias_utils import check_alias_prefix
from app.api.base import api_bp, require_api_auth
from app.api.serializer import (
serialize_alias_info_v2,
get_alias_info_v2,
)
from app.config import MAX_NB_EMAIL_FREE_PLAN, ALIAS_LIMIT
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
AliasUsedOn,
User,
DeletedAlias,
DomainDeletedAlias,
Mailbox,
AliasMailbox,
)
from app.utils import convert_to_id
@api_bp.route("/v2/alias/custom/new", methods=["POST"])
@limiter.limit(ALIAS_LIMIT)
@require_api_auth
@parallel_limiter.lock(name="alias_creation")
def new_custom_alias_v2():
"""
Create a new custom alias
Same as v1 but signed_suffix is actually the suffix with signature, e.g.
.random_word@SL.co.Xq19rQ.s99uWQ7jD1s5JZDZqczYI5TbNNU
Input:
alias_prefix, for ex "www_groupon_com"
signed_suffix, either .random_letters@simplelogin.co or @my-domain.com
optional "hostname" in args
optional "note"
Output:
201 if success
409 if the alias already exists
"""
user: User = g.user
if not user.can_create_new_alias():
LOG.d("user %s cannot create any custom alias", user)
return (
jsonify(
error="You have reached the limitation of a free account with the maximum of "
f"{MAX_NB_EMAIL_FREE_PLAN} aliases, please upgrade your plan to create more aliases"
),
400,
)
hostname = request.args.get("hostname")
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
signed_suffix = data.get("signed_suffix", "").strip()
note = data.get("note")
alias_prefix = convert_to_id(alias_prefix)
try:
alias_suffix = check_suffix_signature(signed_suffix)
if not alias_suffix:
LOG.w("Alias creation time expired for %s", user)
return jsonify(error="Alias creation time is expired, please retry"), 412
except Exception:
LOG.w("Alias suffix is tampered, user %s", user)
return jsonify(error="Tampered suffix"), 400
if not verify_prefix_suffix(user, alias_prefix, alias_suffix):
return jsonify(error="wrong alias prefix or suffix"), 400
full_alias = alias_prefix + alias_suffix
if (
Alias.get_by(email=full_alias)
or DeletedAlias.get_by(email=full_alias)
or DomainDeletedAlias.get_by(email=full_alias)
):
LOG.d("full alias already used %s", full_alias)
return jsonify(error=f"alias {full_alias} already exists"), 409
if ".." in full_alias:
return (
jsonify(error="2 consecutive dot signs aren't allowed in an email address"),
400,
)
alias = Alias.create(
user_id=user.id,
email=full_alias,
mailbox_id=user.default_mailbox_id,
note=note,
)
Session.commit()
if hostname:
AliasUsedOn.create(alias_id=alias.id, hostname=hostname, user_id=alias.user_id)
Session.commit()
return (
jsonify(alias=full_alias, **serialize_alias_info_v2(get_alias_info_v2(alias))),
201,
)
@api_bp.route("/v3/alias/custom/new", methods=["POST"])
@limiter.limit(ALIAS_LIMIT)
@require_api_auth
@parallel_limiter.lock(name="alias_creation")
def new_custom_alias_v3():
"""
Create a new custom alias
Same as v2 but accept a list of mailboxes as input
Input:
alias_prefix, for ex "www_groupon_com"
signed_suffix, either .random_letters@simplelogin.co or @my-domain.com
mailbox_ids: list of int
optional "hostname" in args
optional "note"
optional "name"
Output:
201 if success
409 if the alias already exists
"""
user: User = g.user
if not user.can_create_new_alias():
LOG.d("user %s cannot create any custom alias", user)
return (
jsonify(
error="You have reached the limitation of a free account with the maximum of "
f"{MAX_NB_EMAIL_FREE_PLAN} aliases, please upgrade your plan to create more aliases"
),
400,
)
hostname = request.args.get("hostname")
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
if type(data) is not dict:
return jsonify(error="request body does not follow the required format"), 400
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
signed_suffix = data.get("signed_suffix", "") or ""
signed_suffix = signed_suffix.strip()
mailbox_ids = data.get("mailbox_ids")
note = data.get("note")
name = data.get("name")
if name:
name = name.replace("\n", "")
alias_prefix = convert_to_id(alias_prefix)
if not check_alias_prefix(alias_prefix):
return jsonify(error="alias prefix invalid format or too long"), 400
# check that the mailboxes are not tampered with
if type(mailbox_ids) is not list:
return jsonify(error="mailbox_ids must be an array of id"), 400
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
return jsonify(error="Errors with Mailbox"), 400
mailboxes.append(mailbox)
if not mailboxes:
return jsonify(error="At least one mailbox must be selected"), 400
# hypothesis: user will click on the button within 600 secs
try:
alias_suffix = check_suffix_signature(signed_suffix)
if not alias_suffix:
LOG.w("Alias creation time expired for %s", user)
return jsonify(error="Alias creation time is expired, please retry"), 412
except Exception:
LOG.w("Alias suffix is tampered, user %s", user)
return jsonify(error="Tampered suffix"), 400
if not verify_prefix_suffix(user, alias_prefix, alias_suffix):
return jsonify(error="wrong alias prefix or suffix"), 400
full_alias = alias_prefix + alias_suffix
if (
Alias.get_by(email=full_alias)
or DeletedAlias.get_by(email=full_alias)
or DomainDeletedAlias.get_by(email=full_alias)
):
LOG.d("full alias already used %s", full_alias)
return jsonify(error=f"alias {full_alias} already exists"), 409
if ".." in full_alias:
return (
jsonify(error="2 consecutive dot signs aren't allowed in an email address"),
400,
)
alias = Alias.create(
user_id=user.id,
email=full_alias,
note=note,
name=name or None,
mailbox_id=mailboxes[0].id,
)
Session.flush()
for i in range(1, len(mailboxes)):
AliasMailbox.create(
alias_id=alias.id,
mailbox_id=mailboxes[i].id,
)
Session.commit()
if hostname:
AliasUsedOn.create(alias_id=alias.id, hostname=hostname, user_id=alias.user_id)
Session.commit()
return (
jsonify(alias=full_alias, **serialize_alias_info_v2(get_alias_info_v2(alias))),
201,
)
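
An illustrative request to the v3 endpoint above (not part of this commit). The signed_suffix must be a server-signed suffix (see check_suffix_signature); the value here, like the mailbox ids and the header name, is only a placeholder/assumption.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

resp = requests.post(
    f"{BASE_URL}/api/v3/alias/custom/new",
    params={"hostname": "www.groupon.com"},   # optional
    headers=HEADERS,
    json={
        "alias_prefix": "www_groupon_com",
        "signed_suffix": ".xxxx@simplelogin.co.signature",  # placeholder signed suffix
        "mailbox_ids": [1],
        "note": "created from the API",
        "name": "Groupon",
    },
)
print(resp.status_code)  # 201 on success, 409 if the alias exists, 412 if the signature expired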

View File

@ -0,0 +1,117 @@
import tldextract
from flask import g
from flask import jsonify, request
from app import parallel_limiter
from app.alias_suffix import get_alias_suffixes
from app.api.base import api_bp, require_api_auth
from app.api.serializer import (
get_alias_info_v2,
serialize_alias_info_v2,
)
from app.config import MAX_NB_EMAIL_FREE_PLAN, ALIAS_LIMIT
from app.db import Session
from app.errors import AliasInTrashError
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, AliasUsedOn, AliasGeneratorEnum
from app.utils import convert_to_id
@api_bp.route("/alias/random/new", methods=["POST"])
@limiter.limit(ALIAS_LIMIT)
@require_api_auth
@parallel_limiter.lock(name="alias_creation")
def new_random_alias():
"""
Create a new random alias
Input:
(Optional) note
Output:
201 if success
"""
user = g.user
if not user.can_create_new_alias():
LOG.d("user %s cannot create new random alias", user)
return (
jsonify(
error=f"You have reached the limitation of a free account with the maximum of "
f"{MAX_NB_EMAIL_FREE_PLAN} aliases, please upgrade your plan to create more aliases"
),
400,
)
note = None
data = request.get_json(silent=True)
if data:
note = data.get("note")
alias = None
# custom alias suggestion and suffix
hostname = request.args.get("hostname")
if hostname and user.include_website_in_one_click_alias:
LOG.d("Use %s to create new alias", hostname)
# keep only the domain name of hostname, ignore TLD and subdomain
# for ex www.groupon.com -> groupon
ext = tldextract.extract(hostname)
prefix_suggestion = ext.domain
prefix_suggestion = convert_to_id(prefix_suggestion)
suffixes = get_alias_suffixes(user)
# use the first suffix
suggested_alias = prefix_suggestion + suffixes[0].suffix
alias = Alias.get_by(email=suggested_alias)
# cannot use this alias as it belongs to another user
if alias and not alias.user_id == user.id:
LOG.d("%s belongs to another user", alias)
alias = None
elif alias and alias.user_id == user.id:
# make sure alias was created for this website
if AliasUsedOn.get_by(
alias_id=alias.id, hostname=hostname, user_id=alias.user_id
):
LOG.d("Use existing alias %s", alias)
else:
LOG.d("%s wasn't created for this website %s", alias, hostname)
alias = None
elif not alias:
LOG.d("create new alias %s", suggested_alias)
try:
alias = Alias.create(
user_id=user.id,
email=suggested_alias,
note=note,
mailbox_id=user.default_mailbox_id,
commit=True,
)
except AliasInTrashError:
LOG.i("Alias %s is in trash", suggested_alias)
alias = None
if not alias:
scheme = user.alias_generator
mode = request.args.get("mode")
if mode:
if mode == "word":
scheme = AliasGeneratorEnum.word.value
elif mode == "uuid":
scheme = AliasGeneratorEnum.uuid.value
else:
return jsonify(error=f"{mode} must be either word or uuid"), 400
alias = Alias.create_new_random(user=user, scheme=scheme, note=note)
Session.commit()
if hostname and not AliasUsedOn.get_by(alias_id=alias.id, hostname=hostname):
AliasUsedOn.create(
alias_id=alias.id, hostname=hostname, user_id=alias.user_id, commit=True
)
return (
jsonify(alias=alias.email, **serialize_alias_info_v2(get_alias_info_v2(alias))),
201,
)
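
A usage sketch (not part of this commit): creating a random alias and steering the generator through the optional "mode" and "hostname" query parameters. Header name and values are assumptions/placeholders.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

resp = requests.post(
    f"{BASE_URL}/api/alias/random/new",
    params={"mode": "word", "hostname": "news.ycombinator.com"},  # both optional
    headers=HEADERS,
    json={"note": "created from the API"},
)
print(resp.status_code, resp.json()["alias"])  # 201 and the new alias address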

View File

@ -0,0 +1,83 @@
from flask import g
from flask import jsonify
from flask import request
from app.api.base import api_bp, require_api_auth
from app.config import PAGE_LIMIT
from app.db import Session
from app.models import Notification
@api_bp.route("/notifications", methods=["GET"])
@require_api_auth
def get_notifications():
"""
Get notifications
Input:
- page: in url. Starts at 0
Output:
- more: boolean. Whether there are more notifications to load
- notifications: list of notifications.
- id
- message
- title
- read
- created_at
"""
user = g.user
try:
page = int(request.args.get("page"))
except (ValueError, TypeError):
return jsonify(error="page must be provided in request query"), 400
notifications = (
Notification.filter_by(user_id=user.id)
.order_by(Notification.read, Notification.created_at.desc())
.limit(PAGE_LIMIT + 1) # load a record more to know whether there's more
.offset(page * PAGE_LIMIT)
.all()
)
have_more = len(notifications) > PAGE_LIMIT
return (
jsonify(
more=have_more,
notifications=[
{
"id": notification.id,
"message": notification.message,
"title": notification.title,
"read": notification.read,
"created_at": notification.created_at.humanize(),
}
for notification in notifications[:PAGE_LIMIT]
],
),
200,
)
@api_bp.route("/notifications/<int:notification_id>/read", methods=["POST"])
@require_api_auth
def mark_as_read(notification_id):
"""
Mark a notification as read
Input:
notification_id: in url
Output:
200 if updated successfully
"""
user = g.user
notification = Notification.get(notification_id)
if not notification or notification.user_id != user.id:
return jsonify(error="Forbidden"), 403
notification.read = True
Session.commit()
return jsonify(done=True), 200
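
A small client-side sketch (not part of this commit) that pages through notifications and marks unread ones as read, following the pagination contract above (page starts at 0, "more" signals another page). Header name is an assumption.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

page = 0
while True:
    data = requests.get(
        f"{BASE_URL}/api/notifications", headers=HEADERS, params={"page": page}
    ).json()
    for notif in data["notifications"]:
        if not notif["read"]:
            requests.post(
                f"{BASE_URL}/api/notifications/{notif['id']}/read", headers=HEADERS
            )
    if not data["more"]:
        break
    page += 1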

View File

@ -0,0 +1,51 @@
import arrow
from flask import g
from flask import jsonify
from app.api.base import api_bp, require_api_auth
from app.models import (
PhoneReservation,
PhoneMessage,
)
@api_bp.route("/phone/reservations/<int:reservation_id>", methods=["GET", "POST"])
@require_api_auth
def phone_messages(reservation_id):
"""
Return messages during this reservation
Output:
- messages: list of messages, each with:
- id
- from_number
- body
- created_at: e.g. 5 minutes ago
"""
user = g.user
reservation: PhoneReservation = PhoneReservation.get(reservation_id)
if not reservation or reservation.user_id != user.id:
return jsonify(error="Invalid reservation"), 400
phone_number = reservation.number
messages = PhoneMessage.filter(
PhoneMessage.number_id == phone_number.id,
PhoneMessage.created_at > reservation.start,
PhoneMessage.created_at < reservation.end,
).all()
return (
jsonify(
messages=[
{
"id": message.id,
"from_number": message.from_number,
"body": message.body,
"created_at": message.created_at.humanize(),
}
for message in messages
],
ended=reservation.end < arrow.now(),
),
200,
)
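
A sketch (not part of this commit) of polling the messages received during a phone number reservation; the reservation id and header name are placeholders/assumptions.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

data = requests.get(f"{BASE_URL}/api/phone/reservations/7", headers=HEADERS).json()
for message in data["messages"]:
    print(message["from_number"], message["created_at"], message["body"])
print("reservation ended" if data["ended"] else "reservation still running")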

View File

@ -0,0 +1,148 @@
import arrow
from flask import jsonify, g, request
from app.api.base import api_bp, require_api_auth
from app.db import Session
from app.log import LOG
from app.models import (
User,
AliasGeneratorEnum,
SLDomain,
CustomDomain,
SenderFormatEnum,
AliasSuffixEnum,
)
from app.proton.utils import perform_proton_account_unlink
def setting_to_dict(user: User):
ret = {
"notification": user.notification,
"alias_generator": "word"
if user.alias_generator == AliasGeneratorEnum.word.value
else "uuid",
"random_alias_default_domain": user.default_random_alias_domain(),
# return the default sender format (AT) in case user uses a non-supported sender format
"sender_format": SenderFormatEnum.get_name(user.sender_format)
or SenderFormatEnum.AT.name,
"random_alias_suffix": AliasSuffixEnum.get_name(user.random_alias_suffix),
}
return ret
@api_bp.route("/setting")
@require_api_auth
def get_setting():
"""
Return user setting
"""
user = g.user
return jsonify(setting_to_dict(user))
@api_bp.route("/setting", methods=["PATCH"])
@require_api_auth
def update_setting():
"""
Update user setting
Input:
- notification: bool
- alias_generator: word|uuid
- random_alias_default_domain: str
- sender_format: str, a SenderFormatEnum name (e.g. AT)
- random_alias_suffix: str, an AliasSuffixEnum name
"""
user = g.user
data = request.get_json() or {}
if "notification" in data:
user.notification = data["notification"]
if "alias_generator" in data:
alias_generator = data["alias_generator"]
if alias_generator not in ["word", "uuid"]:
return jsonify(error="Invalid alias_generator"), 400
if alias_generator == "word":
user.alias_generator = AliasGeneratorEnum.word.value
else:
user.alias_generator = AliasGeneratorEnum.uuid.value
if "sender_format" in data:
sender_format = data["sender_format"]
if not SenderFormatEnum.has_name(sender_format):
return jsonify(error="Invalid sender_format"), 400
user.sender_format = SenderFormatEnum.get_value(sender_format)
user.sender_format_updated_at = arrow.now()
if "random_alias_suffix" in data:
random_alias_suffix = data["random_alias_suffix"]
if not AliasSuffixEnum.has_name(random_alias_suffix):
return jsonify(error="Invalid random_alias_suffix"), 400
user.random_alias_suffix = AliasSuffixEnum.get_value(random_alias_suffix)
if "random_alias_default_domain" in data:
default_domain = data["random_alias_default_domain"]
sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
if sl_domain:
if sl_domain.premium_only and not user.is_premium():
return jsonify(error="You cannot use this domain"), 400
user.default_alias_public_domain_id = sl_domain.id
user.default_alias_custom_domain_id = None
else:
custom_domain = CustomDomain.get_by(domain=default_domain)
if not custom_domain:
return jsonify(error="invalid domain"), 400
# sanity check
if custom_domain.user_id != user.id or not custom_domain.verified:
LOG.w("%s cannot use domain %s", user, default_domain)
return jsonify(error="invalid domain"), 400
else:
user.default_alias_custom_domain_id = custom_domain.id
user.default_alias_public_domain_id = None
Session.commit()
return jsonify(setting_to_dict(user))
@api_bp.route("/setting/domains")
@require_api_auth
def get_available_domains_for_random_alias():
"""
Available domains for random alias
"""
user = g.user
ret = [
(is_sl, domain) for is_sl, domain in user.available_domains_for_random_alias()
]
return jsonify(ret)
@api_bp.route("/v2/setting/domains")
@require_api_auth
def get_available_domains_for_random_alias_v2():
"""
Available domains for random alias
"""
user = g.user
ret = [
{"domain": domain, "is_custom": not is_sl}
for is_sl, domain in user.available_domains_for_random_alias()
]
return jsonify(ret)
@api_bp.route("/setting/unlink_proton_account", methods=["DELETE"])
@require_api_auth
def unlink_proton_account():
user = g.user
perform_proton_account_unlink(user)
return jsonify({"ok": True})

27
app/app/api/views/sudo.py Normal file
View File

@ -0,0 +1,27 @@
from flask import jsonify, g, request
from sqlalchemy_utils.types.arrow import arrow
from app.api.base import api_bp, require_api_auth
from app.db import Session
@api_bp.route("/sudo", methods=["PATCH"])
@require_api_auth
def enter_sudo():
"""
Enter sudo mode
Input
- password: user password to validate request to enter sudo mode
"""
user = g.user
data = request.get_json() or {}
if "password" not in data:
return jsonify(error="Invalid password"), 403
if not user.check_password(data["password"]):
return jsonify(error="Invalid password"), 403
g.api_key.sudo_mode_at = arrow.now()
Session.commit()
return jsonify(ok=True)
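
A minimal sketch (not part of this commit) of entering sudo mode from an API client; endpoints guarded by require_api_sudo (such as DELETE /api/user in the next file) expect this call to have succeeded first. Header name is an assumption.
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

resp = requests.patch(
    f"{BASE_URL}/api/sudo", headers=HEADERS, json={"password": "the-user-password"}
)
print(resp.status_code)  # 200 when the password is correct, 403 otherwise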

46
app/app/api/views/user.py Normal file
View File

@ -0,0 +1,46 @@
from flask import jsonify, g
from sqlalchemy_utils.types.arrow import arrow
from app.api.base import api_bp, require_api_sudo, require_api_auth
from app import config
from app.extensions import limiter
from app.log import LOG
from app.models import Job, ApiToCookieToken
@api_bp.route("/user", methods=["DELETE"])
@require_api_sudo
def delete_user():
"""
Delete the user. Requires sudo mode.
"""
# Schedule delete account job
LOG.w("schedule delete account job for %s", g.user)
Job.create(
name=config.JOB_DELETE_ACCOUNT,
payload={"user_id": g.user.id},
run_at=arrow.now(),
commit=True,
)
return jsonify(ok=True)
@api_bp.route("/user/cookie_token", methods=["GET"])
@require_api_auth
@limiter.limit("5/minute")
def get_api_session_token():
"""
Get a temporary token that can be exchanged for a cookie-based session
Output:
200 and a temporary random token
{
token: "asdli3ldq39h9hd3",
}
"""
token = ApiToCookieToken.create(
user=g.user,
api_key_id=g.api_key.id,
commit=True,
)
return jsonify({"token": token.code})

View File

@ -0,0 +1,138 @@
import base64
from io import BytesIO
from typing import Optional
from flask import jsonify, g, request, make_response
from app import s3, config
from app.api.base import api_bp, require_api_auth
from app.config import SESSION_COOKIE_NAME
from app.db import Session
from app.models import ApiKey, File, PartnerUser, User
from app.proton.utils import get_proton_partner
from app.session import logout_session
from app.utils import random_string
def get_connected_proton_address(user: User) -> Optional[str]:
proton_partner = get_proton_partner()
partner_user = PartnerUser.get_by(user_id=user.id, partner_id=proton_partner.id)
if partner_user is None:
return None
return partner_user.partner_email
def user_to_dict(user: User) -> dict:
ret = {
"name": user.name or "",
"is_premium": user.is_premium(),
"email": user.email,
"in_trial": user.in_trial(),
"max_alias_free_plan": user.max_alias_for_free_account(),
"connected_proton_address": None,
}
if config.CONNECT_WITH_PROTON:
ret["connected_proton_address"] = get_connected_proton_address(user)
if user.profile_picture_id:
ret["profile_picture_url"] = user.profile_picture.get_url()
else:
ret["profile_picture_url"] = None
return ret
@api_bp.route("/user_info")
@require_api_auth
def user_info():
"""
Return user info given the api-key
Output as json
- name
- is_premium
- email
- in_trial
- max_alias_free_plan
- connected_proton_address (null unless the Proton integration is enabled)
- profile_picture_url
"""
user = g.user
return jsonify(user_to_dict(user))
@api_bp.route("/user_info", methods=["PATCH"])
@require_api_auth
def update_user_info():
"""
Input
- profile_picture (optional): base64 of the profile picture. Set to null to remove the profile picture
- name (optional)
"""
user = g.user
data = request.get_json() or {}
if "profile_picture" in data:
if data["profile_picture"] is None:
if user.profile_picture_id:
file = user.profile_picture
user.profile_picture_id = None
Session.flush()
if file:
File.delete(file.id)
s3.delete(file.path)
Session.flush()
else:
raw_data = base64.decodebytes(data["profile_picture"].encode())
file_path = random_string(30)
file = File.create(user_id=user.id, path=file_path)
Session.flush()
s3.upload_from_bytesio(file_path, BytesIO(raw_data))
user.profile_picture_id = file.id
Session.flush()
if "name" in data:
user.name = data["name"]
Session.commit()
return jsonify(user_to_dict(user))
@api_bp.route("/api_key", methods=["POST"])
@require_api_auth
def create_api_key():
"""Used to create a new api key
Input:
- device
Output:
- api_key
"""
data = request.get_json()
if not data:
return jsonify(error="request body cannot be empty"), 400
device = data.get("device")
api_key = ApiKey.create(user_id=g.user.id, name=device)
Session.commit()
return jsonify(api_key=api_key.code), 201
@api_bp.route("/logout", methods=["GET"])
@require_api_auth
def logout():
"""
Log user out on the web, i.e. remove the cookie
Output:
- 200
"""
logout_session()
response = make_response(jsonify(msg="User is logged out"), 200)
response.delete_cookie(SESSION_COOKIE_NAME)
return response
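
A usage sketch (not part of this commit) for the endpoints above: update the display name and profile picture (base64-encoded, as update_user_info expects), then mint an extra API key for another device. Header name and file path are assumptions/placeholders.
import base64
import requests

BASE_URL = "https://app.simplelogin.io"       # placeholder
HEADERS = {"Authentication": "your-api-key"}  # assumed header name

with open("avatar.png", "rb") as f:           # placeholder image file
    picture_b64 = base64.b64encode(f.read()).decode()

requests.patch(
    f"{BASE_URL}/api/user_info",
    headers=HEADERS,
    json={"name": "Jane", "profile_picture": picture_b64},  # send null to remove the picture
)

new_key = requests.post(
    f"{BASE_URL}/api/api_key", headers=HEADERS, json={"device": "tablet"}
).json()["api_key"]
print(new_key)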

19
app/app/auth/__init__.py Normal file
View File

@ -0,0 +1,19 @@
from .views import (
login,
logout,
register,
activate,
resend_activation,
reset_password,
forgot_password,
github,
google,
facebook,
proton,
change_email,
mfa,
fido,
social,
recovery,
api_to_cookie,
)

5
app/app/auth/base.py Normal file
View File

@ -0,0 +1,5 @@
from flask import Blueprint
auth_bp = Blueprint(
name="auth", import_name=__name__, url_prefix="/auth", template_folder="templates"
)

View File

View File

@ -0,0 +1,69 @@
from flask import request, redirect, url_for, flash, render_template, g
from flask_login import login_user, current_user
from app import email_utils
from app.auth.base import auth_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import ActivationCode
from app.utils import sanitize_next_url
@auth_bp.route("/activate", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def activate():
if current_user.is_authenticated:
return (
render_template("auth/activate.html", error="You are already logged in"),
400,
)
code = request.args.get("code")
activation_code: ActivationCode = ActivationCode.get_by(code=code)
if not activation_code:
# Trigger rate limiter
g.deduct_limit = True
return (
render_template(
"auth/activate.html", error="Activation code cannot be found"
),
400,
)
if activation_code.is_expired():
return (
render_template(
"auth/activate.html",
error="Activation code was expired",
show_resend_activation=True,
),
400,
)
user = activation_code.user
user.activated = True
login_user(user)
# activation code is to be used only once
ActivationCode.delete(activation_code.id)
Session.commit()
flash("Your account has been activated", "success")
email_utils.send_welcome_email(user)
# The activation link contains the original page, for ex authorize page
if "next" in request.args:
next_url = sanitize_next_url(request.args.get("next"))
LOG.d("redirect user to %s", next_url)
return redirect(next_url)
else:
LOG.d("redirect user to dashboard")
return redirect(url_for("dashboard.index"))
# todo: redirect to account_activated page when more features are added into the browser extension
# return redirect(url_for("onboarding.account_activated"))

View File

@ -0,0 +1,30 @@
import arrow
from flask import redirect, url_for, request, flash
from flask_login import login_user
from app.auth.base import auth_bp
from app.models import ApiToCookieToken
from app.utils import sanitize_next_url
@auth_bp.route("/api_to_cookie", methods=["GET"])
def api_to_cookie():
code = request.args.get("token")
if not code:
flash("Missing token", "error")
return redirect(url_for("auth.login"))
token = ApiToCookieToken.get_by(code=code)
if not token or token.created_at < arrow.now().shift(minutes=-5):
flash("Missing token", "error")
return redirect(url_for("auth.login"))
user = token.user
ApiToCookieToken.delete(token.id, commit=True)
login_user(user)
next_url = sanitize_next_url(request.args.get("next"))
if next_url:
return redirect(next_url)
else:
return redirect(url_for("dashboard.index"))

View File

@ -0,0 +1,35 @@
from flask import request, flash, render_template, redirect, url_for
from flask_login import login_user
from app.auth.base import auth_bp
from app.db import Session
from app.models import EmailChange, ResetPasswordCode
@auth_bp.route("/change_email", methods=["GET", "POST"])
def change_email():
code = request.args.get("code")
email_change: EmailChange = EmailChange.get_by(code=code)
if not email_change:
return render_template("auth/change_email.html")
if email_change.is_expired():
# delete the expired email
EmailChange.delete(email_change.id)
Session.commit()
return render_template("auth/change_email.html")
user = email_change.user
user.email = email_change.new_email
EmailChange.delete(email_change.id)
ResetPasswordCode.filter_by(user_id=user.id).delete()
Session.commit()
flash("Your new email has been updated", "success")
login_user(user)
return redirect(url_for("dashboard.index"))

View File

@ -0,0 +1,127 @@
from flask import request, session, redirect, url_for, flash
from requests_oauthlib import OAuth2Session
from requests_oauthlib.compliance_fixes import facebook_compliance_fix
from app.auth.base import auth_bp
from app.auth.views.google import create_file_from_url
from app.config import (
URL,
FACEBOOK_CLIENT_ID,
FACEBOOK_CLIENT_SECRET,
)
from app.db import Session
from app.log import LOG
from app.models import User, SocialAuth
from .login_utils import after_login
from ...utils import sanitize_email, sanitize_next_url
_authorization_base_url = "https://www.facebook.com/dialog/oauth"
_token_url = "https://graph.facebook.com/oauth/access_token"
_scope = ["email"]
# need to set redirect_uri explicitly instead of letting the lib pre-fill it
# when served behind nginx, the redirect_uri is localhost... and not the real url
_redirect_uri = URL + "/auth/facebook/callback"
@auth_bp.route("/facebook/login")
def facebook_login():
# to avoid flask-login displaying the login error message
session.pop("_flashes", None)
next_url = sanitize_next_url(request.args.get("next"))
# Facebook does not allow appending params to redirect_uri
# we need to pass the next url by session
if next_url:
session["facebook_next_url"] = next_url
facebook = OAuth2Session(
FACEBOOK_CLIENT_ID, scope=_scope, redirect_uri=_redirect_uri
)
facebook = facebook_compliance_fix(facebook)
authorization_url, state = facebook.authorization_url(_authorization_base_url)
# State is used to prevent CSRF, keep this for later.
session["oauth_state"] = state
return redirect(authorization_url)
@auth_bp.route("/facebook/callback")
def facebook_callback():
# user clicks on cancel
if "error" in request.args:
flash("Please use another sign in method then", "warning")
return redirect("/")
facebook = OAuth2Session(
FACEBOOK_CLIENT_ID,
state=session["oauth_state"],
scope=_scope,
redirect_uri=_redirect_uri,
)
facebook = facebook_compliance_fix(facebook)
facebook.fetch_token(
_token_url,
client_secret=FACEBOOK_CLIENT_SECRET,
authorization_response=request.url,
)
# Fetch a protected resource, i.e. user profile
# {
# "email": "abcd@gmail.com",
# "id": "1234",
# "name": "First Last",
# "picture": {
# "data": {
# "url": "long_url"
# }
# }
# }
facebook_user_data = facebook.get(
"https://graph.facebook.com/me?fields=id,name,email,picture{url}"
).json()
email = facebook_user_data.get("email")
# user chose not to share their email, cannot continue
if not email:
flash(
"In order to use SimpleLogin, you need to give us a valid email", "warning"
)
return redirect(url_for("auth.register"))
email = sanitize_email(email)
user = User.get_by(email=email)
picture_url = facebook_user_data.get("picture", {}).get("data", {}).get("url")
if user:
if picture_url and not user.profile_picture_id:
LOG.d("set user profile picture to %s", picture_url)
file = create_file_from_url(user, picture_url)
user.profile_picture_id = file.id
Session.commit()
else:
flash(
"Sorry you cannot sign up via Facebook, please use email/password sign-up instead",
"error",
)
return redirect(url_for("auth.register"))
next_url = None
# The activation link contains the original page, for ex authorize page
if "facebook_next_url" in session:
next_url = session["facebook_next_url"]
LOG.d("redirect user to %s", next_url)
# reset the next_url to avoid user getting redirected at each login :)
session.pop("facebook_next_url", None)
if not SocialAuth.get_by(user_id=user.id, social="facebook"):
SocialAuth.create(user_id=user.id, social="facebook")
Session.commit()
return after_login(user, next_url)

173
app/app/auth/views/fido.py Normal file
View File

@ -0,0 +1,173 @@
import json
import secrets
from time import time
import webauthn
from flask import (
request,
render_template,
redirect,
url_for,
flash,
session,
make_response,
g,
)
from flask_login import login_user
from flask_wtf import FlaskForm
from wtforms import HiddenField, validators, BooleanField
from app.auth.base import auth_bp
from app.config import MFA_USER_ID
from app.config import RP_ID, URL
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import User, Fido, MfaBrowser
from app.utils import sanitize_next_url
class FidoTokenForm(FlaskForm):
sk_assertion = HiddenField("sk_assertion", validators=[validators.DataRequired()])
remember = BooleanField(
"attr", default=False, description="Remember this browser for 30 days"
)
@auth_bp.route("/fido", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def fido():
# passed from login page
user_id = session.get(MFA_USER_ID)
# user access this page directly without passing by login page
if not user_id:
flash("Unknown error, redirect back to main page", "warning")
return redirect(url_for("auth.login"))
user = User.get(user_id)
if not (user and user.fido_enabled()):
flash("Only user with security key linked should go to this page", "warning")
return redirect(url_for("auth.login"))
auto_activate = True
fido_token_form = FidoTokenForm()
next_url = sanitize_next_url(request.args.get("next"))
if request.cookies.get("mfa"):
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
if browser and not browser.is_expired() and browser.user_id == user.id:
login_user(user)
flash(f"Welcome back!", "success")
# Redirect user to correct page
return redirect(next_url or url_for("dashboard.index"))
else:
# Trigger rate limiter
g.deduct_limit = True
# Handling POST requests
if fido_token_form.validate_on_submit():
try:
sk_assertion = json.loads(fido_token_form.sk_assertion.data)
except Exception:
flash("Key verification failed. Error: Invalid Payload", "warning")
return redirect(url_for("auth.login"))
challenge = session["fido_challenge"]
try:
fido_key = Fido.get_by(
uuid=user.fido_uuid, credential_id=sk_assertion["id"]
)
webauthn_user = webauthn.WebAuthnUser(
user.fido_uuid,
user.email,
user.name if user.name else user.email,
False,
fido_key.credential_id,
fido_key.public_key,
fido_key.sign_count,
RP_ID,
)
webauthn_assertion_response = webauthn.WebAuthnAssertionResponse(
webauthn_user, sk_assertion, challenge, URL, uv_required=False
)
new_sign_count = webauthn_assertion_response.verify()
except Exception as e:
LOG.w(f"An error occurred in WebAuthn verification process: {e}")
flash("Key verification failed.", "warning")
# Trigger rate limiter
g.deduct_limit = True
auto_activate = False
else:
user.fido_sign_count = new_sign_count
Session.commit()
del session[MFA_USER_ID]
session["sudo_time"] = int(time())
login_user(user)
flash(f"Welcome back!", "success")
# Redirect user to correct page
response = make_response(redirect(next_url or url_for("dashboard.index")))
if fido_token_form.remember.data:
browser = MfaBrowser.create_new(user=user)
Session.commit()
response.set_cookie(
"mfa",
value=browser.token,
expires=browser.expires.datetime,
secure=True if URL.startswith("https") else False,
httponly=True,
samesite="Lax",
)
return response
# Prepare information for key registration process
session.pop("challenge", None)
challenge = secrets.token_urlsafe(32)
session["fido_challenge"] = challenge.rstrip("=")
fidos = Fido.filter_by(uuid=user.fido_uuid).all()
webauthn_users = []
for fido in fidos:
webauthn_users.append(
webauthn.WebAuthnUser(
user.fido_uuid,
user.email,
user.name if user.name else user.email,
False,
fido.credential_id,
fido.public_key,
fido.sign_count,
RP_ID,
)
)
webauthn_assertion_options = webauthn.WebAuthnAssertionOptions(
webauthn_users, challenge
)
webauthn_assertion_options = webauthn_assertion_options.assertion_dict
try:
# HACK: We need to upgrade to webauthn > 1 so it can support specifying the transports
for credential in webauthn_assertion_options["allowCredentials"]:
del credential["transports"]
except KeyError:
# Should never happen but...
pass
return render_template(
"auth/fido.html",
fido_token_form=fido_token_form,
webauthn_assertion_options=webauthn_assertion_options,
enable_otp=user.enable_otp,
auto_activate=auto_activate,
next_url=next_url,
)

View File

@ -0,0 +1,42 @@
from flask import request, render_template, redirect, url_for, flash, g
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.dashboard.views.setting import send_reset_password_email
from app.extensions import limiter
from app.log import LOG
from app.models import User
from app.utils import sanitize_email, canonicalize_email
class ForgotPasswordForm(FlaskForm):
email = StringField("Email", validators=[validators.DataRequired()])
@auth_bp.route("/forgot_password", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def forgot_password():
form = ForgotPasswordForm(request.form)
if form.validate_on_submit():
# Trigger rate limiter
g.deduct_limit = True
flash(
"If your email is correct, you are going to receive an email to reset your password",
"success",
)
email = sanitize_email(form.email.data)
canonical_email = canonicalize_email(email)
user = User.get_by(email=email) or User.get_by(email=canonical_email)
if user:
LOG.d("Send forgot password email to %s", user)
send_reset_password_email(user)
return redirect(url_for("auth.forgot_password"))
return render_template("auth/forgot_password.html", form=form)

View File

@ -0,0 +1,102 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, URL
from app.db import Session
from app.log import LOG
from app.models import User, SocialAuth
from app.utils import encode_url, sanitize_email, sanitize_next_url
_authorization_base_url = "https://github.com/login/oauth/authorize"
_token_url = "https://github.com/login/oauth/access_token"
# need to set redirect_uri explicitly instead of letting the lib pre-fill it
# when served behind nginx, the redirect_uri is localhost... and not the real url
_redirect_uri = URL + "/auth/github/callback"
@auth_bp.route("/github/login")
def github_login():
next_url = sanitize_next_url(request.args.get("next"))
if next_url:
redirect_uri = _redirect_uri + "?next=" + encode_url(next_url)
else:
redirect_uri = _redirect_uri
github = OAuth2Session(
GITHUB_CLIENT_ID, scope=["user:email"], redirect_uri=redirect_uri
)
authorization_url, state = github.authorization_url(_authorization_base_url)
# State is used to prevent CSRF, keep this for later.
session["oauth_state"] = state
return redirect(authorization_url)
@auth_bp.route("/github/callback")
def github_callback():
# user clicks on cancel
if "error" in request.args:
flash("Please use another sign in method then", "warning")
return redirect("/")
github = OAuth2Session(
GITHUB_CLIENT_ID,
state=session["oauth_state"],
scope=["user:email"],
redirect_uri=_redirect_uri,
)
github.fetch_token(
_token_url,
client_secret=GITHUB_CLIENT_SECRET,
authorization_response=request.url,
)
# a dict with "name", "login"
github_user_data = github.get("https://api.github.com/user").json()
# return list of emails
# {
# 'email': 'abcd@gmail.com',
# 'primary': False,
# 'verified': True,
# 'visibility': None
# }
emails = github.get("https://api.github.com/user/emails").json()
# only take the primary email
email = None
for e in emails:
if e.get("verified") and e.get("primary"):
email = e.get("email")
break
if not email:
LOG.e(f"cannot get email for github user {github_user_data} {emails}")
flash(
"Cannot get a valid email from Github, please another way to login/sign up",
"error",
)
return redirect(url_for("auth.login"))
email = sanitize_email(email)
user = User.get_by(email=email)
if not user:
flash(
"Sorry you cannot sign up via Github, please use email/password sign-up instead",
"error",
)
return redirect(url_for("auth.register"))
if not SocialAuth.get_by(user_id=user.id, social="github"):
SocialAuth.create(user_id=user.id, social="github")
Session.commit()
# The activation link contains the original page, for ex authorize page
next_url = sanitize_next_url(request.args.get("next")) if request.args else None
return after_login(user, next_url)

View File

@ -0,0 +1,125 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session
from app import s3
from app.auth.base import auth_bp
from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
from app.db import Session
from app.log import LOG
from app.models import User, File, SocialAuth
from app.utils import random_string, sanitize_email
from .login_utils import after_login
_authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
_token_url = "https://www.googleapis.com/oauth2/v4/token"
_scope = [
"https://www.googleapis.com/auth/userinfo.email",
"https://www.googleapis.com/auth/userinfo.profile",
"openid",
]
# need to set redirect_uri explicitly instead of letting the lib pre-fill it
# when served behind nginx, the redirect_uri is localhost... and not the real url
_redirect_uri = URL + "/auth/google/callback"
@auth_bp.route("/google/login")
def google_login():
# to avoid flask-login displaying the login error message
session.pop("_flashes", None)
next_url = request.args.get("next")
# Google does not allow appending params to the redirect_uri
# we need to pass the next url by session
if next_url:
session["google_next_url"] = next_url
google = OAuth2Session(GOOGLE_CLIENT_ID, scope=_scope, redirect_uri=_redirect_uri)
authorization_url, state = google.authorization_url(_authorization_base_url)
# State is used to prevent CSRF, keep this for later.
session["oauth_state"] = state
return redirect(authorization_url)
@auth_bp.route("/google/callback")
def google_callback():
# user clicks on cancel
if "error" in request.args:
flash("please use another sign in method then", "warning")
return redirect("/")
google = OAuth2Session(
GOOGLE_CLIENT_ID,
# somehow Google login fails with an oauth_state KeyError
# state=session["oauth_state"],
scope=_scope,
redirect_uri=_redirect_uri,
)
google.fetch_token(
_token_url,
client_secret=GOOGLE_CLIENT_SECRET,
authorization_response=request.url,
)
# Fetch a protected resource, i.e. user profile
# {
# "email": "abcd@gmail.com",
# "family_name": "First name",
# "given_name": "Last name",
# "id": "1234",
# "locale": "en",
# "name": "First Last",
# "picture": "http://profile.jpg",
# "verified_email": true
# }
google_user_data = google.get(
"https://www.googleapis.com/oauth2/v1/userinfo"
).json()
email = sanitize_email(google_user_data["email"])
user = User.get_by(email=email)
picture_url = google_user_data.get("picture")
if user:
if picture_url and not user.profile_picture_id:
LOG.d("set user profile picture to %s", picture_url)
file = create_file_from_url(user, picture_url)
user.profile_picture_id = file.id
Session.commit()
else:
flash(
"Sorry you cannot sign up via Google, please use email/password sign-up instead",
"error",
)
return redirect(url_for("auth.register"))
next_url = None
# The activation link contains the original page, for ex authorize page
if "google_next_url" in session:
next_url = session["google_next_url"]
LOG.d("redirect user to %s", next_url)
# reset the next_url to avoid user getting redirected at each login :)
session.pop("google_next_url", None)
if not SocialAuth.get_by(user_id=user.id, social="google"):
SocialAuth.create(user_id=user.id, social="google")
Session.commit()
return after_login(user, next_url)
def create_file_from_url(user, url) -> File:
file_path = random_string(30)
file = File.create(path=file_path, user_id=user.id)
s3.upload_from_url(url, file_path)
Session.flush()
LOG.d("upload file %s to s3", file)
return file

View File

@ -0,0 +1,74 @@
from flask import request, render_template, redirect, url_for, flash, g
from flask_login import current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import CONNECT_WITH_PROTON
from app.events.auth_event import LoginEvent
from app.extensions import limiter
from app.log import LOG
from app.models import User
from app.utils import sanitize_email, sanitize_next_url, canonicalize_email
class LoginForm(FlaskForm):
email = StringField("Email", validators=[validators.DataRequired()])
password = StringField("Password", validators=[validators.DataRequired()])
@auth_bp.route("/login", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def login():
next_url = sanitize_next_url(request.args.get("next"))
if current_user.is_authenticated:
if next_url:
LOG.d("user is already authenticated, redirect to %s", next_url)
return redirect(next_url)
else:
LOG.d("user is already authenticated, redirect to dashboard")
return redirect(url_for("dashboard.index"))
form = LoginForm(request.form)
show_resend_activation = False
if form.validate_on_submit():
email = sanitize_email(form.email.data)
canonical_email = canonicalize_email(email)
user = User.get_by(email=email) or User.get_by(email=canonical_email)
if not user or not user.check_password(form.password.data):
# Trigger rate limiter
g.deduct_limit = True
form.password.data = None
flash("Email or password incorrect", "error")
LoginEvent(LoginEvent.ActionType.failed).send()
elif user.disabled:
flash(
"Your account is disabled. Please contact SimpleLogin team to re-enable your account.",
"error",
)
LoginEvent(LoginEvent.ActionType.disabled_login).send()
elif not user.activated:
show_resend_activation = True
flash(
"Please check your inbox for the activation email. You can also have this email re-sent",
"error",
)
LoginEvent(LoginEvent.ActionType.not_activated).send()
else:
LoginEvent(LoginEvent.ActionType.success).send()
return after_login(user, next_url)
return render_template(
"auth/login.html",
form=form,
next_url=next_url,
show_resend_activation=show_resend_activation,
connect_with_proton=CONNECT_WITH_PROTON,
)

View File

@ -0,0 +1,68 @@
from time import time
from typing import Optional
from flask import session, redirect, url_for, request
from flask_login import login_user
from app.config import MFA_USER_ID
from app.log import LOG
from app.models import Referral
def after_login(user, next_url, login_from_proton: bool = False):
"""
Redirect to the correct page after login.
If the user logged in with Proton, skip the FIDO and OTP checks
If the user has MFA enabled: redirect to the MFA page
Otherwise redirect to next_url, or to the dashboard page if there is no next_url
"""
if not login_from_proton:
if user.fido_enabled():
# Use the same session for FIDO so that we can easily
# switch between these two 2FA options
session[MFA_USER_ID] = user.id
if next_url:
return redirect(url_for("auth.fido", next=next_url))
else:
return redirect(url_for("auth.fido"))
elif user.enable_otp:
session[MFA_USER_ID] = user.id
if next_url:
return redirect(url_for("auth.mfa", next=next_url))
else:
return redirect(url_for("auth.mfa"))
LOG.d("log user %s in", user)
login_user(user)
session["sudo_time"] = int(time())
# User comes to login page from another page
if next_url:
LOG.d("redirect user to %s", next_url)
return redirect(next_url)
else:
LOG.d("redirect user to dashboard")
return redirect(url_for("dashboard.index"))
# name of the cookie that stores the referral code
_REFERRAL_COOKIE = "slref"
def get_referral() -> Optional[Referral]:
"""Get the eventual referral stored in cookie"""
# whether user arrives via a referral
referral = None
if request.cookies:
ref_code = request.cookies.get(_REFERRAL_COOKIE)
referral = Referral.get_by(code=ref_code)
if not referral:
if "slref" in session:
ref_code = session["slref"]
referral = Referral.get_by(code=ref_code)
if referral:
LOG.d("referral found %s", referral)
return referral

View File

@ -0,0 +1,17 @@
from flask import redirect, url_for, flash, make_response
from app.auth.base import auth_bp
from app.config import SESSION_COOKIE_NAME
from app.session import logout_session
@auth_bp.route("/logout")
def logout():
logout_session()
flash("You are logged out", "success")
response = make_response(redirect(url_for("auth.login")))
response.delete_cookie(SESSION_COOKIE_NAME)
response.delete_cookie("mfa")
response.delete_cookie("dark-mode")
return response

107
app/app/auth/views/mfa.py Normal file
View File

@ -0,0 +1,107 @@
import pyotp
from flask import (
render_template,
redirect,
url_for,
flash,
session,
make_response,
request,
g,
)
from flask_login import login_user
from flask_wtf import FlaskForm
from wtforms import BooleanField, StringField, validators
from app.auth.base import auth_bp
from app.config import MFA_USER_ID, URL
from app.db import Session
from app.email_utils import send_invalid_totp_login_email
from app.extensions import limiter
from app.models import User, MfaBrowser
from app.utils import sanitize_next_url
class OtpTokenForm(FlaskForm):
token = StringField("Token", validators=[validators.DataRequired()])
remember = BooleanField(
"attr", default=False, description="Remember this browser for 30 days"
)
@auth_bp.route("/mfa", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def mfa():
# passed from login page
user_id = session.get(MFA_USER_ID)
# the user accessed this page directly without going through the login page
if not user_id:
flash("Unknown error, redirect back to main page", "warning")
return redirect(url_for("auth.login"))
user = User.get(user_id)
if not (user and user.enable_otp):
flash("Only user with MFA enabled should go to this page", "warning")
return redirect(url_for("auth.login"))
otp_token_form = OtpTokenForm()
next_url = sanitize_next_url(request.args.get("next"))
if request.cookies.get("mfa"):
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
if browser and not browser.is_expired() and browser.user_id == user.id:
login_user(user)
flash(f"Welcome back!", "success")
# Redirect user to correct page
return redirect(next_url or url_for("dashboard.index"))
else:
# Trigger rate limiter
g.deduct_limit = True
if otp_token_form.validate_on_submit():
totp = pyotp.TOTP(user.otp_secret)
token = otp_token_form.token.data.replace(" ", "")
if totp.verify(token, valid_window=2) and user.last_otp != token:
del session[MFA_USER_ID]
user.last_otp = token
Session.commit()
login_user(user)
flash(f"Welcome back!", "success")
# Redirect user to correct page
response = make_response(redirect(next_url or url_for("dashboard.index")))
if otp_token_form.remember.data:
browser = MfaBrowser.create_new(user=user)
Session.commit()
response.set_cookie(
"mfa",
value=browser.token,
expires=browser.expires.datetime,
secure=URL.startswith("https"),
httponly=True,
samesite="Lax",
)
return response
else:
flash("Incorrect token", "warning")
# Trigger rate limiter
g.deduct_limit = True
otp_token_form.token.data = None
send_invalid_totp_login_email(user, "TOTP")
return render_template(
"auth/mfa.html",
otp_token_form=otp_token_form,
enable_fido=(user.fido_enabled()),
next_url=next_url,
)
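# Illustrative sketch (hypothetical, not part of the original file) of the pyotp
# behaviour the view above relies on; it reuses the pyotp import at the top of this
# file. verify() accepts a token for the current 30-second time step and, with
# valid_window=2, also for up to two steps before or after, which is why the view
# additionally stores last_otp to reject a replayed token.
_example_secret = pyotp.random_base32()
_example_totp = pyotp.TOTP(_example_secret)
_example_token = _example_totp.now()
assert _example_totp.verify(_example_token, valid_window=2)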

View File

@ -0,0 +1,190 @@
import requests
from flask import request, session, redirect, flash, url_for
from flask_limiter.util import get_remote_address
from flask_login import current_user
from requests_oauthlib import OAuth2Session
from typing import Optional
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import (
PROTON_BASE_URL,
PROTON_CLIENT_ID,
PROTON_CLIENT_SECRET,
PROTON_EXTRA_HEADER_NAME,
PROTON_EXTRA_HEADER_VALUE,
PROTON_VALIDATE_CERTS,
URL,
)
from app.log import LOG
from app.models import ApiKey, User
from app.proton.proton_client import HttpProtonClient, convert_access_token
from app.proton.proton_callback_handler import (
ProtonCallbackHandler,
Action,
)
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url, sanitize_scheme
_authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"
_token_url = PROTON_BASE_URL + "/oauth/token"
# need to set redirect_uri explicitly instead of letting the lib pre-fill it:
# when served behind nginx, the pre-filled redirect_uri is localhost... and not the real URL
_redirect_uri = URL + "/auth/proton/callback"
SESSION_ACTION_KEY = "oauth_action"
SESSION_STATE_KEY = "oauth_state"
DEFAULT_SCHEME = "auth.simplelogin"
def get_api_key_for_user(user: User) -> str:
ak = ApiKey.create(
user_id=user.id,
name="Created via Login with Proton on mobile app",
commit=True,
)
return ak.code
def extract_action() -> Optional[Action]:
action = request.args.get("action")
if action is not None:
if action == "link":
return Action.Link
elif action == "login":
return Action.Login
else:
LOG.w(f"Unknown action received: {action}")
return None
return Action.Login
def get_action_from_state() -> Action:
oauth_action = session[SESSION_ACTION_KEY]
if oauth_action == Action.Login.value:
return Action.Login
elif oauth_action == Action.Link.value:
return Action.Link
raise Exception(f"Unknown action in state: {oauth_action}")
@auth_bp.route("/proton/login")
def proton_login():
if PROTON_CLIENT_ID is None or PROTON_CLIENT_SECRET is None:
return redirect(url_for("auth.login"))
action = extract_action()
if action is None:
return redirect(url_for("auth.login"))
if action == Action.Link and not current_user.is_authenticated:
return redirect(url_for("auth.login"))
next_url = sanitize_next_url(request.args.get("next"))
if next_url:
session["oauth_next"] = next_url
elif "oauth_next" in session:
del session["oauth_next"]
scheme = sanitize_scheme(request.args.get("scheme"))
if scheme:
session["oauth_scheme"] = scheme
elif "oauth_scheme" in session:
del session["oauth_scheme"]
mode = request.args.get("mode", "session")
if mode == "apikey":
session["oauth_mode"] = "apikey"
else:
session["oauth_mode"] = "session"
proton = OAuth2Session(PROTON_CLIENT_ID, redirect_uri=_redirect_uri)
authorization_url, state = proton.authorization_url(_authorization_base_url)
# State is used to prevent CSRF, keep this for later.
session[SESSION_STATE_KEY] = state
session[SESSION_ACTION_KEY] = action.value
return redirect(authorization_url)
@auth_bp.route("/proton/callback")
def proton_callback():
if SESSION_STATE_KEY not in session or SESSION_ACTION_KEY not in session:
flash("Invalid state, please retry", "error")
return redirect(url_for("auth.login"))
if PROTON_CLIENT_ID is None or PROTON_CLIENT_SECRET is None:
return redirect(url_for("auth.login"))
# user clicks on cancel
if "error" in request.args:
flash("Please use another sign in method then", "warning")
return redirect("/")
proton = OAuth2Session(
PROTON_CLIENT_ID,
state=session[SESSION_STATE_KEY],
redirect_uri=_redirect_uri,
)
def check_status_code(response: requests.Response) -> requests.Response:
if response.status_code != 200:
raise Exception(
f"Bad Proton API response [status={response.status_code}]: {response.json()}"
)
return response
proton.register_compliance_hook("access_token_response", check_status_code)
headers = None
if PROTON_EXTRA_HEADER_NAME and PROTON_EXTRA_HEADER_VALUE:
headers = {PROTON_EXTRA_HEADER_NAME: PROTON_EXTRA_HEADER_VALUE}
try:
token = proton.fetch_token(
_token_url,
client_secret=PROTON_CLIENT_SECRET,
authorization_response=request.url,
verify=PROTON_VALIDATE_CERTS,
method="GET",
include_client_id=True,
headers=headers,
)
except Exception as e:
LOG.warning(f"Error fetching Proton token: {e}")
flash("There was an error in the login process", "error")
return redirect(url_for("auth.login"))
credentials = convert_access_token(token["access_token"])
action = get_action_from_state()
proton_client = HttpProtonClient(
PROTON_BASE_URL, credentials, get_remote_address(), verify=PROTON_VALIDATE_CERTS
)
handler = ProtonCallbackHandler(proton_client)
proton_partner = get_proton_partner()
next_url = session.get("oauth_next")
if action == Action.Login:
res = handler.handle_login(proton_partner)
elif action == Action.Link:
res = handler.handle_link(current_user, proton_partner)
else:
raise Exception(f"Unknown Action: {action.name}")
if res.flash_message is not None:
flash(res.flash_message, res.flash_category)
oauth_scheme = session.get("oauth_scheme")
if session.get("oauth_mode", "session") == "apikey":
apikey = get_api_key_for_user(res.user)
scheme = oauth_scheme or DEFAULT_SCHEME
return redirect(f"{scheme}:///login?apikey={apikey}")
if res.redirect_to_login:
return redirect(url_for("auth.login"))
if next_url and next_url[0] == "/" and oauth_scheme:
next_url = f"{oauth_scheme}://{next_url}"
redirect_url = next_url or res.redirect
return after_login(res.user, redirect_url, login_from_proton=True)

View File

@ -0,0 +1,75 @@
import arrow
from flask import request, render_template, redirect, url_for, flash, session, g
from flask_login import login_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.config import MFA_USER_ID
from app.db import Session
from app.email_utils import send_invalid_totp_login_email
from app.extensions import limiter
from app.log import LOG
from app.models import User, RecoveryCode
from app.utils import sanitize_next_url
class RecoveryForm(FlaskForm):
code = StringField("Code", validators=[validators.DataRequired()])
@auth_bp.route("/recovery", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def recovery_route():
# passed from login page
user_id = session.get(MFA_USER_ID)
# the user accessed this page directly without going through the login page
if not user_id:
flash("Unknown error, redirect back to main page", "warning")
return redirect(url_for("auth.login"))
user = User.get(user_id)
if not user or not user.two_factor_authentication_enabled():
flash("Only users with MFA enabled should access this page", "warning")
return redirect(url_for("auth.login"))
recovery_form = RecoveryForm()
next_url = sanitize_next_url(request.args.get("next"))
if recovery_form.validate_on_submit():
code = recovery_form.code.data
recovery_code = RecoveryCode.find_by_user_code(user, code)
if recovery_code:
if recovery_code.used:
# Trigger rate limiter
g.deduct_limit = True
flash("Code already used", "error")
else:
del session[MFA_USER_ID]
login_user(user)
flash(f"Welcome back!", "success")
recovery_code.used = True
recovery_code.used_at = arrow.now()
Session.commit()
# User comes to login page from another page
if next_url:
LOG.d("redirect user to %s", next_url)
return redirect(next_url)
else:
LOG.d("redirect user to dashboard")
return redirect(url_for("dashboard.index"))
else:
# Trigger rate limiter
g.deduct_limit = True
flash("Incorrect code", "error")
send_invalid_totp_login_email(user, "recovery")
return render_template("auth/recovery.html", recovery_form=recovery_form)

View File

@ -0,0 +1,128 @@
import requests
from flask import request, flash, render_template, redirect, url_for
from flask_login import current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app import email_utils, config
from app.auth.base import auth_bp
from app.config import CONNECT_WITH_PROTON
from app.auth.views.login_utils import get_referral
from app.config import URL, HCAPTCHA_SECRET, HCAPTCHA_SITEKEY
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
)
from app.events.auth_event import RegisterEvent
from app.log import LOG
from app.models import User, ActivationCode, DailyMetric
from app.utils import random_string, encode_url, sanitize_email, canonicalize_email
class RegisterForm(FlaskForm):
email = StringField("Email", validators=[validators.DataRequired()])
password = StringField(
"Password",
validators=[validators.DataRequired(), validators.Length(min=8, max=100)],
)
@auth_bp.route("/register", methods=["GET", "POST"])
def register():
if current_user.is_authenticated:
LOG.d("user is already authenticated, redirect to dashboard")
flash("You are already logged in", "warning")
return redirect(url_for("dashboard.index"))
if config.DISABLE_REGISTRATION:
flash("Registration is closed", "error")
return redirect(url_for("auth.login"))
form = RegisterForm(request.form)
next_url = request.args.get("next")
if form.validate_on_submit():
# only check if hcaptcha is enabled
if HCAPTCHA_SECRET:
# check with hCaptcha
token = request.form.get("h-captcha-response")
params = {"secret": HCAPTCHA_SECRET, "response": token}
hcaptcha_res = requests.post(
"https://hcaptcha.com/siteverify", data=params
).json()
# return something like
# {'success': True,
# 'challenge_ts': '2020-07-23T10:03:25',
# 'hostname': '127.0.0.1'}
if not hcaptcha_res["success"]:
LOG.w(
"User put wrong captcha %s %s",
form.email.data,
hcaptcha_res,
)
flash("Wrong Captcha", "error")
RegisterEvent(RegisterEvent.ActionType.catpcha_failed).send()
return render_template(
"auth/register.html",
form=form,
next_url=next_url,
HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
)
email = canonicalize_email(form.email.data)
if not email_can_be_used_as_mailbox(email):
flash("You cannot use this email address as your personal inbox.", "error")
RegisterEvent(RegisterEvent.ActionType.email_in_use).send()
else:
sanitized_email = sanitize_email(form.email.data)
if personal_email_already_used(email) or personal_email_already_used(
sanitized_email
):
flash(f"Email {email} already used", "error")
RegisterEvent(RegisterEvent.ActionType.email_in_use).send()
else:
LOG.d("create user %s", email)
user = User.create(
email=email,
name=form.email.data,
password=form.password.data,
referral=get_referral(),
)
Session.commit()
try:
send_activation_email(user, next_url)
RegisterEvent(RegisterEvent.ActionType.success).send()
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
1
)
Session.commit()
except Exception:
flash("Invalid email, are you sure the email is correct?", "error")
RegisterEvent(RegisterEvent.ActionType.invalid_email).send()
return redirect(url_for("auth.register"))
return render_template("auth/register_waiting_activation.html")
return render_template(
"auth/register.html",
form=form,
next_url=next_url,
HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
connect_with_proton=CONNECT_WITH_PROTON,
)
def send_activation_email(user, next_url):
# the activation code is valid for 1h
activation = ActivationCode.create(user_id=user.id, code=random_string(30))
Session.commit()
# Send user activation email
activation_link = f"{URL}/auth/activate?code={activation.code}"
if next_url:
LOG.d("redirect user to %s after activation", next_url)
activation_link = activation_link + "&next=" + encode_url(next_url)
email_utils.send_activation_email(user.email, activation_link)
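# Illustrative example (hypothetical values, not part of the original file): assuming
# URL is "https://app.sl.example" and the generated code is "abc123", the activation
# link built above is "https://app.sl.example/auth/activate?code=abc123", with
# "&next=<url-encoded next_url>" appended when a next_url was passed.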

View File

@ -0,0 +1,44 @@
from flask import request, flash, render_template, redirect, url_for
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.auth.views.register import send_activation_email
from app.extensions import limiter
from app.log import LOG
from app.models import User
from app.utils import sanitize_email, canonicalize_email
class ResendActivationForm(FlaskForm):
email = StringField("Email", validators=[validators.DataRequired()])
@auth_bp.route("/resend_activation", methods=["GET", "POST"])
@limiter.limit("10/hour")
def resend_activation():
form = ResendActivationForm(request.form)
if form.validate_on_submit():
email = sanitize_email(form.email.data)
canonical_email = canonicalize_email(email)
user = User.get_by(email=email) or User.get_by(email=canonical_email)
if not user:
flash("There is no such email", "warning")
return render_template("auth/resend_activation.html", form=form)
if user.activated:
flash("Your account was already activated, please login", "success")
return redirect(url_for("auth.login"))
# user is not activated
LOG.d("user %s is not activated", user)
flash(
"An activation email has been sent to you. Please check your inbox/spam folder.",
"warning",
)
send_activation_email(user, request.args.get("next"))
return render_template("auth/register_waiting_activation.html")
return render_template("auth/resend_activation.html", form=form)

View File

@ -0,0 +1,75 @@
import uuid
from flask import request, flash, render_template, url_for, g
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.db import Session
from app.extensions import limiter
from app.models import ResetPasswordCode
class ResetPasswordForm(FlaskForm):
password = StringField(
"Password",
validators=[validators.DataRequired(), validators.Length(min=8, max=100)],
)
@auth_bp.route("/reset_password", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def reset_password():
form = ResetPasswordForm(request.form)
reset_password_code_str = request.args.get("code")
reset_password_code: ResetPasswordCode = ResetPasswordCode.get_by(
code=reset_password_code_str
)
if not reset_password_code:
# Trigger rate limiter
g.deduct_limit = True
error = (
"The reset password link can be used only once. "
"Please request a new link to reset password."
)
return render_template("auth/reset_password.html", form=form, error=error)
if reset_password_code.is_expired():
error = "The link has been already expired. Please make a new request of the reset password link"
return render_template("auth/reset_password.html", form=form, error=error)
if form.validate_on_submit():
user = reset_password_code.user
new_password = form.password.data
# avoid user reusing the old password
if user.check_password(new_password):
error = "You cannot reuse the same password"
return render_template("auth/reset_password.html", form=form, error=error)
user.set_password(new_password)
flash("Your new password has been set", "success")
# this also serves to activate the user
user.activated = True
# remove the reset password code
ResetPasswordCode.delete(reset_password_code.id)
# change the alternative_id to log user out on other browsers
user.alternative_id = str(uuid.uuid4())
Session.commit()
# do not use login_user(user) here
# to make sure user needs to go through MFA if enabled
return after_login(user, url_for("dashboard.index"))
return render_template("auth/reset_password.html", form=form)

View File

@ -0,0 +1,14 @@
from flask import render_template, redirect, url_for
from flask_login import current_user
from app.auth.base import auth_bp
from app.log import LOG
@auth_bp.route("/social", methods=["GET", "POST"])
def social():
if current_user.is_authenticated:
LOG.d("user is already authenticated, redirect to dashboard")
return redirect(url_for("dashboard.index"))
return render_template("auth/social.html")

2
app/app/build_info.py Normal file
View File

@ -0,0 +1,2 @@
SHA1 = "dev"
BUILD_TIME = "1652365083"

529
app/app/config.py Normal file
View File

@ -0,0 +1,529 @@
import os
import random
import socket
import string
from ast import literal_eval
from typing import Callable, List
from urllib.parse import urlparse
from dotenv import load_dotenv
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def get_abs_path(file_path: str):
"""append ROOT_DIR for relative path"""
# Already absolute path
if file_path.startswith("/"):
return file_path
else:
return os.path.join(ROOT_DIR, file_path)
def sl_getenv(env_var: str, default_factory: Callable = None):
"""
Get env value, convert into Python object
Args:
env_var (str): env var, example: SL_DB
default_factory: returns value if this env var is not set.
"""
value = os.getenv(env_var)
if value is None:
return default_factory()
return literal_eval(value)
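# Illustrative usage (hypothetical values, not part of the original file): with the
# variable set as
#   OTHER_ALIAS_DOMAINS='["d1.example", "d2.example"]'
# sl_getenv("OTHER_ALIAS_DOMAINS", list) literal-evals the string into the Python list
# ["d1.example", "d2.example"]; when the variable is unset, default_factory is called
# instead, so passing `list` falls back to an empty list.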
config_file = os.environ.get("CONFIG")
if config_file:
config_file = get_abs_path(config_file)
print("load config file", config_file)
load_dotenv(get_abs_path(config_file))
else:
load_dotenv()
COLOR_LOG = "COLOR_LOG" in os.environ
# Allow a user to have 1 year of premium: set the expiration_date to 1 year later
PROMO_CODE = "SIMPLEISBETTER"
# Server url
URL = os.environ["URL"]
print(">>> URL:", URL)
# Calculate RP_ID for WebAuthn
RP_ID = urlparse(URL).hostname
SENTRY_DSN = os.environ.get("SENTRY_DSN")
# can use another sentry project for the front-end to avoid noise
SENTRY_FRONT_END_DSN = os.environ.get("SENTRY_FRONT_END_DSN") or SENTRY_DSN
# Email related settings
NOT_SEND_EMAIL = "NOT_SEND_EMAIL" in os.environ
EMAIL_DOMAIN = os.environ["EMAIL_DOMAIN"].lower()
SUPPORT_EMAIL = os.environ["SUPPORT_EMAIL"]
SUPPORT_NAME = os.environ.get("SUPPORT_NAME", "Son from SimpleLogin")
ADMIN_EMAIL = os.environ.get("ADMIN_EMAIL")
# to receive monitoring daily report
MONITORING_EMAIL = os.environ.get("MONITORING_EMAIL")
# VERP: mail_from set to BOUNCE_PREFIX + email_log.id + BOUNCE_SUFFIX
BOUNCE_PREFIX = os.environ.get("BOUNCE_PREFIX") or "bounce+"
BOUNCE_SUFFIX = os.environ.get("BOUNCE_SUFFIX") or f"+@{EMAIL_DOMAIN}"
# Used for VERP during reply phase. It's similar to BOUNCE_PREFIX.
# It's needed when sending emails from a custom domain to respect DMARC.
# BOUNCE_PREFIX_FOR_REPLY_PHASE should never be used in any existing alias
# and can't be used for creating a new alias on a custom domain
# Note: unlike BOUNCE_PREFIX, BOUNCE_PREFIX_FOR_REPLY_PHASE doesn't have a trailing plus sign (+)
BOUNCE_PREFIX_FOR_REPLY_PHASE = (
os.environ.get("BOUNCE_PREFIX_FOR_REPLY_PHASE") or "bounce_reply"
)
# VERP for transactional email: mail_from set to BOUNCE_PREFIX + email_log.id + BOUNCE_SUFFIX
TRANSACTIONAL_BOUNCE_PREFIX = (
os.environ.get("TRANSACTIONAL_BOUNCE_PREFIX") or "transactional+"
)
TRANSACTIONAL_BOUNCE_SUFFIX = (
os.environ.get("TRANSACTIONAL_BOUNCE_SUFFIX") or f"+@{EMAIL_DOMAIN}"
)
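# Illustrative example (not part of the original file): with the defaults above, the
# VERP mail_from for email_log id 123 would be
#   BOUNCE_PREFIX + "123" + BOUNCE_SUFFIX                              # bounce+123+@<EMAIL_DOMAIN>
#   TRANSACTIONAL_BOUNCE_PREFIX + "123" + TRANSACTIONAL_BOUNCE_SUFFIX  # transactional+123+@<EMAIL_DOMAIN>
# for the forward and transactional phases respectively.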
try:
MAX_NB_EMAIL_FREE_PLAN = int(os.environ["MAX_NB_EMAIL_FREE_PLAN"])
except Exception:
print("MAX_NB_EMAIL_FREE_PLAN is not set, use 5 as default value")
MAX_NB_EMAIL_FREE_PLAN = 5
MAX_NB_EMAIL_OLD_FREE_PLAN = int(os.environ.get("MAX_NB_EMAIL_OLD_FREE_PLAN", 15))
# maximum number of directories a premium user can create
MAX_NB_DIRECTORY = 50
MAX_NB_SUBDOMAIN = 5
ENFORCE_SPF = "ENFORCE_SPF" in os.environ
# override postfix server locally
# use 240.0.0.1 here instead of 10.0.0.1 as existing SL instances use the 240.0.0.0 network
POSTFIX_SERVER = os.environ.get("POSTFIX_SERVER", "240.0.0.1")
DISABLE_REGISTRATION = "DISABLE_REGISTRATION" in os.environ
# allow using a different postfix port, useful when developing locally
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", 25))
# Use port 587 instead of 25 when sending emails through Postfix
# Useful when calling Postfix from an external network
POSTFIX_SUBMISSION_TLS = "POSTFIX_SUBMISSION_TLS" in os.environ
POSTFIX_TIMEOUT = int(os.environ.get("POSTFIX_TIMEOUT", 3))
# ["domain1.com", "domain2.com"]
OTHER_ALIAS_DOMAINS = sl_getenv("OTHER_ALIAS_DOMAINS", list)
OTHER_ALIAS_DOMAINS = [d.lower().strip() for d in OTHER_ALIAS_DOMAINS]
# List of domains user can use to create alias
if "ALIAS_DOMAINS" in os.environ:
ALIAS_DOMAINS = sl_getenv("ALIAS_DOMAINS") # ["domain1.com", "domain2.com"]
else:
ALIAS_DOMAINS = OTHER_ALIAS_DOMAINS + [EMAIL_DOMAIN]
ALIAS_DOMAINS = [d.lower().strip() for d in ALIAS_DOMAINS]
# ["domain1.com", "domain2.com"]
PREMIUM_ALIAS_DOMAINS = sl_getenv("PREMIUM_ALIAS_DOMAINS", list)
PREMIUM_ALIAS_DOMAINS = [d.lower().strip() for d in PREMIUM_ALIAS_DOMAINS]
# the alias domain used when creating the first alias for user
FIRST_ALIAS_DOMAIN = os.environ.get("FIRST_ALIAS_DOMAIN") or EMAIL_DOMAIN
# list of (priority, email server)
# e.g. [(10, "mx1.hostname."), (10, "mx2.hostname.")]
EMAIL_SERVERS_WITH_PRIORITY = sl_getenv("EMAIL_SERVERS_WITH_PRIORITY")
# disable the alias suffix, i.e. the ".random_word" part
DISABLE_ALIAS_SUFFIX = "DISABLE_ALIAS_SUFFIX" in os.environ
# the email address that receives all unsubscription requests
UNSUBSCRIBER = os.environ.get("UNSUBSCRIBER")
# due to a typo, both UNSUBSCRIBER and OLD_UNSUBSCRIBER are supported
OLD_UNSUBSCRIBER = os.environ.get("OLD_UNSUBSCRIBER")
DKIM_SELECTOR = b"dkim"
DKIM_PRIVATE_KEY = None
if "DKIM_PRIVATE_KEY_PATH" in os.environ:
DKIM_PRIVATE_KEY_PATH = get_abs_path(os.environ["DKIM_PRIVATE_KEY_PATH"])
with open(DKIM_PRIVATE_KEY_PATH) as f:
DKIM_PRIVATE_KEY = f.read()
# Database
DB_URI = os.environ["DB_URI"]
DB_CONN_NAME = os.environ.get("DB_CONN_NAME", "webapp")
# Flask secret
FLASK_SECRET = os.environ["FLASK_SECRET"]
if not FLASK_SECRET:
raise RuntimeError("FLASK_SECRET is empty. Please define it.")
SESSION_COOKIE_NAME = "slapp"
MAILBOX_SECRET = FLASK_SECRET + "mailbox"
CUSTOM_ALIAS_SECRET = FLASK_SECRET + "custom_alias"
UNSUBSCRIBE_SECRET = FLASK_SECRET + "unsub"
# AWS
AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
BUCKET = os.environ.get("BUCKET")
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
# Paddle
try:
PADDLE_VENDOR_ID = int(os.environ["PADDLE_VENDOR_ID"])
PADDLE_MONTHLY_PRODUCT_ID = int(os.environ["PADDLE_MONTHLY_PRODUCT_ID"])
PADDLE_YEARLY_PRODUCT_ID = int(os.environ["PADDLE_YEARLY_PRODUCT_ID"])
except (KeyError, ValueError):
print("Paddle param not set")
PADDLE_VENDOR_ID = -1
PADDLE_MONTHLY_PRODUCT_ID = -1
PADDLE_YEARLY_PRODUCT_ID = -1
# Other Paddle product IDS
PADDLE_MONTHLY_PRODUCT_IDS = sl_getenv("PADDLE_MONTHLY_PRODUCT_IDS", list)
PADDLE_MONTHLY_PRODUCT_IDS.append(PADDLE_MONTHLY_PRODUCT_ID)
PADDLE_YEARLY_PRODUCT_IDS = sl_getenv("PADDLE_YEARLY_PRODUCT_IDS", list)
PADDLE_YEARLY_PRODUCT_IDS.append(PADDLE_YEARLY_PRODUCT_ID)
PADDLE_PUBLIC_KEY_PATH = get_abs_path(
os.environ.get("PADDLE_PUBLIC_KEY_PATH", "local_data/paddle.key.pub")
)
PADDLE_AUTH_CODE = os.environ.get("PADDLE_AUTH_CODE")
PADDLE_COUPON_ID = os.environ.get("PADDLE_COUPON_ID")
# OpenID keys, used to sign id_token
OPENID_PRIVATE_KEY_PATH = get_abs_path(
os.environ.get("OPENID_PRIVATE_KEY_PATH", "local_data/jwtRS256.key")
)
OPENID_PUBLIC_KEY_PATH = get_abs_path(
os.environ.get("OPENID_PUBLIC_KEY_PATH", "local_data/jwtRS256.key.pub")
)
# Used to generate random email
# words.txt is a list of English words and doesn't contain any "bad" word
# words_alpha.txt comes from https://github.com/dwyl/english-words and also contains bad words.
WORDS_FILE_PATH = get_abs_path(
os.environ.get("WORDS_FILE_PATH", "local_data/words.txt")
)
# GnuPG home directory used for PGP operations
if os.environ.get("GNUPGHOME"):
GNUPGHOME = get_abs_path(os.environ.get("GNUPGHOME"))
else:
letters = string.ascii_lowercase
random_dir_name = "".join(random.choice(letters) for _ in range(20))
GNUPGHOME = f"/tmp/{random_dir_name}"
if not os.path.exists(GNUPGHOME):
os.mkdir(GNUPGHOME, mode=0o700)
print("WARNING: Use a temp directory for GNUPGHOME", GNUPGHOME)
# Github, Google, Facebook client id and secrets
GITHUB_CLIENT_ID = os.environ.get("GITHUB_CLIENT_ID")
GITHUB_CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")
GOOGLE_CLIENT_ID = os.environ.get("GOOGLE_CLIENT_ID")
GOOGLE_CLIENT_SECRET = os.environ.get("GOOGLE_CLIENT_SECRET")
FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")
PROTON_CLIENT_ID = os.environ.get("PROTON_CLIENT_ID")
PROTON_CLIENT_SECRET = os.environ.get("PROTON_CLIENT_SECRET")
PROTON_BASE_URL = os.environ.get(
"PROTON_BASE_URL", "https://account.protonmail.com/api"
)
PROTON_VALIDATE_CERTS = "PROTON_VALIDATE_CERTS" in os.environ
CONNECT_WITH_PROTON = "CONNECT_WITH_PROTON" in os.environ
PROTON_EXTRA_HEADER_NAME = os.environ.get("PROTON_EXTRA_HEADER_NAME")
PROTON_EXTRA_HEADER_VALUE = os.environ.get("PROTON_EXTRA_HEADER_VALUE")
# in seconds
AVATAR_URL_EXPIRATION = 3600 * 24 * 7  # 3600s/h * 24h/d * 7d = 1 week
# session key
MFA_USER_ID = "mfa_user_id"
FLASK_PROFILER_PATH = os.environ.get("FLASK_PROFILER_PATH")
FLASK_PROFILER_PASSWORD = os.environ.get("FLASK_PROFILER_PASSWORD")
# Job names
JOB_ONBOARDING_1 = "onboarding-1"
JOB_ONBOARDING_2 = "onboarding-2"
JOB_ONBOARDING_3 = "onboarding-3"
JOB_ONBOARDING_4 = "onboarding-4"
JOB_BATCH_IMPORT = "batch-import"
JOB_DELETE_ACCOUNT = "delete-account"
JOB_DELETE_MAILBOX = "delete-mailbox"
JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
# for pagination
PAGE_LIMIT = 20
# Upload to static/upload instead of s3
LOCAL_FILE_UPLOAD = "LOCAL_FILE_UPLOAD" in os.environ
UPLOAD_DIR = None
# Rate Limiting
# nb max of activity (forward/reply) an alias can have during 1 min
MAX_ACTIVITY_DURING_MINUTE_PER_ALIAS = 10
# nb max of activity (forward/reply) a mailbox can have during 1 min
MAX_ACTIVITY_DURING_MINUTE_PER_MAILBOX = 15
if LOCAL_FILE_UPLOAD:
print("Upload files to local dir")
UPLOAD_DIR = os.path.join(ROOT_DIR, "static/upload")
if not os.path.exists(UPLOAD_DIR):
print("Create upload dir")
os.makedirs(UPLOAD_DIR)
LANDING_PAGE_URL = os.environ.get("LANDING_PAGE_URL") or "https://simplelogin.io"
STATUS_PAGE_URL = os.environ.get("STATUS_PAGE_URL") or "https://status.simplelogin.io"
# Loading PGP keys when mail_handler runs. To be used locally when init_app is not called.
LOAD_PGP_EMAIL_HANDLER = "LOAD_PGP_EMAIL_HANDLER" in os.environ
# Used when querying info on Apple API
# for iOS App
APPLE_API_SECRET = os.environ.get("APPLE_API_SECRET")
# for Mac App
MACAPP_APPLE_API_SECRET = os.environ.get("MACAPP_APPLE_API_SECRET")
# <<<<< ALERT EMAIL >>>>
# maximal number of alerts that can be sent to the same email in 24h
MAX_ALERT_24H = 4
# When a reverse-alias receives emails from an unknown mailbox
ALERT_REVERSE_ALIAS_UNKNOWN_MAILBOX = "reverse_alias_unknown_mailbox"
# When somebody is trying to spoof a reply
ALERT_DMARC_FAILED_REPLY_PHASE = "dmarc_failed_reply_phase"
# When a forwarding email is bounced
ALERT_BOUNCE_EMAIL = "bounce"
ALERT_BOUNCE_EMAIL_REPLY_PHASE = "bounce-when-reply"
# When a forwarding email is detected as spam
ALERT_SPAM_EMAIL = "spam"
# When an email is sent from a mailbox to an alias - a cycle
ALERT_SEND_EMAIL_CYCLE = "cycle"
ALERT_NON_REVERSE_ALIAS_REPLY_PHASE = "non_reverse_alias_reply_phase"
ALERT_FROM_ADDRESS_IS_REVERSE_ALIAS = "from_address_is_reverse_alias"
ALERT_TO_NOREPLY = "to_noreply"
ALERT_SPF = "spf"
ALERT_INVALID_TOTP_LOGIN = "invalid_totp_login"
# when a mailbox is also an alias
# happens when user adds a mailbox with their domain
# then later adds this domain into SimpleLogin
ALERT_MAILBOX_IS_ALIAS = "mailbox_is_alias"
AlERT_WRONG_MX_RECORD_CUSTOM_DOMAIN = "custom_domain_mx_record_issue"
# alert when a new alias is about to be created on a disabled directory
ALERT_DIRECTORY_DISABLED_ALIAS_CREATION = "alert_directory_disabled_alias_creation"
ALERT_COMPLAINT_REPLY_PHASE = "alert_complaint_reply_phase"
ALERT_COMPLAINT_FORWARD_PHASE = "alert_complaint_forward_phase"
ALERT_COMPLAINT_TRANSACTIONAL_PHASE = "alert_complaint_transactional_phase"
ALERT_QUARANTINE_DMARC = "alert_quarantine_dmarc"
ALERT_DUAL_SUBSCRIPTION_WITH_PARTNER = "alert_dual_sub_with_partner"
# <<<<< END ALERT EMAIL >>>>
# Disable onboarding emails
DISABLE_ONBOARDING = "DISABLE_ONBOARDING" in os.environ
HCAPTCHA_SECRET = os.environ.get("HCAPTCHA_SECRET")
HCAPTCHA_SITEKEY = os.environ.get("HCAPTCHA_SITEKEY")
PLAUSIBLE_HOST = os.environ.get("PLAUSIBLE_HOST")
PLAUSIBLE_DOMAIN = os.environ.get("PLAUSIBLE_DOMAIN")
# server host
HOST = socket.gethostname()
SPAMASSASSIN_HOST = os.environ.get("SPAMASSASSIN_HOST")
# by default use a tolerant score
if "MAX_SPAM_SCORE" in os.environ:
MAX_SPAM_SCORE = float(os.environ["MAX_SPAM_SCORE"])
else:
MAX_SPAM_SCORE = 5.5
# use a more restrictive score when replying
if "MAX_REPLY_PHASE_SPAM_SCORE" in os.environ:
MAX_REPLY_PHASE_SPAM_SCORE = float(os.environ["MAX_REPLY_PHASE_SPAM_SCORE"])
else:
MAX_REPLY_PHASE_SPAM_SCORE = 5
PGP_SENDER_PRIVATE_KEY = None
PGP_SENDER_PRIVATE_KEY_PATH = os.environ.get("PGP_SENDER_PRIVATE_KEY_PATH")
if PGP_SENDER_PRIVATE_KEY_PATH:
with open(get_abs_path(PGP_SENDER_PRIVATE_KEY_PATH)) as f:
PGP_SENDER_PRIVATE_KEY = f.read()
# the signer address that signs outgoing encrypted emails
PGP_SIGNER = os.environ.get("PGP_SIGNER")
# emails that have an empty From address are sent from this special reverse-alias
NOREPLY = os.environ.get("NOREPLY", f"noreply@{EMAIL_DOMAIN}")
# list of no reply addresses
NOREPLIES = sl_getenv("NOREPLIES", list) or [NOREPLY]
COINBASE_WEBHOOK_SECRET = os.environ.get("COINBASE_WEBHOOK_SECRET")
COINBASE_CHECKOUT_ID = os.environ.get("COINBASE_CHECKOUT_ID")
COINBASE_API_KEY = os.environ.get("COINBASE_API_KEY")
try:
COINBASE_YEARLY_PRICE = float(os.environ["COINBASE_YEARLY_PRICE"])
except Exception:
COINBASE_YEARLY_PRICE = 30.00
ALIAS_LIMIT = os.environ.get("ALIAS_LIMIT") or "100/day;50/hour;5/minute"
ENABLE_SPAM_ASSASSIN = "ENABLE_SPAM_ASSASSIN" in os.environ
ALIAS_RANDOM_SUFFIX_LENGTH = int(os.environ.get("ALIAS_RAND_SUFFIX_LENGTH", 5))
try:
HIBP_SCAN_INTERVAL_DAYS = int(os.environ.get("HIBP_SCAN_INTERVAL_DAYS"))
except Exception:
HIBP_SCAN_INTERVAL_DAYS = 7
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
POSTMASTER = os.environ.get("POSTMASTER")
# store temporary files, especially for debugging
TEMP_DIR = os.environ.get("TEMP_DIR")
# Store unsent emails
SAVE_UNSENT_DIR = os.environ.get("SAVE_UNSENT_DIR")
if SAVE_UNSENT_DIR and not os.path.isdir(SAVE_UNSENT_DIR):
try:
os.makedirs(SAVE_UNSENT_DIR)
except FileExistsError:
pass
# enable automatic alias disabling: an alias can be automatically disabled if it has too many bounces
ALIAS_AUTOMATIC_DISABLE = "ALIAS_AUTOMATIC_DISABLE" in os.environ
# whether the DKIM signing is handled by Rspamd
RSPAMD_SIGN_DKIM = "RSPAMD_SIGN_DKIM" in os.environ
TWILIO_AUTH_TOKEN = os.environ.get("TWILIO_AUTH_TOKEN")
PHONE_PROVIDER_1_HEADER = "X-SimpleLogin-Secret"
PHONE_PROVIDER_1_SECRET = os.environ.get("PHONE_PROVIDER_1_SECRET")
PHONE_PROVIDER_2_HEADER = os.environ.get("PHONE_PROVIDER_2_HEADER")
PHONE_PROVIDER_2_SECRET = os.environ.get("PHONE_PROVIDER_2_SECRET")
ZENDESK_HOST = os.environ.get("ZENDESK_HOST")
ZENDESK_API_TOKEN = os.environ.get("ZENDESK_API_TOKEN")
ZENDESK_ENABLED = "ZENDESK_ENABLED" in os.environ
DMARC_CHECK_ENABLED = "DMARC_CHECK_ENABLED" in os.environ
# Bounces can happen up to 5 days after sending
VERP_MESSAGE_LIFETIME = 5 * 86400
VERP_PREFIX = os.environ.get("VERP_PREFIX") or "sl"
# Generate with python3 -c 'import secrets; print(secrets.token_hex(28))'
VERP_EMAIL_SECRET = os.environ.get("VERP_EMAIL_SECRET") or (
FLASK_SECRET + "pleasegenerateagoodrandomtoken"
)
if len(VERP_EMAIL_SECRET) < 32:
raise RuntimeError(
"Please, set VERP_EMAIL_SECRET to a random string at least 32 chars long"
)
ALIAS_TRANSFER_TOKEN_SECRET = os.environ.get("ALIAS_TRANSFER_TOKEN_SECRET") or (
FLASK_SECRET + "aliastransfertoken"
)
def get_allowed_redirect_domains() -> List[str]:
allowed_domains = sl_getenv("ALLOWED_REDIRECT_DOMAINS", list)
if allowed_domains:
return allowed_domains
parsed_url = urlparse(URL)
return [parsed_url.hostname]
ALLOWED_REDIRECT_DOMAINS = get_allowed_redirect_domains()
def setup_nameservers():
nameservers = os.environ.get("NAMESERVERS", "1.1.1.1")
return nameservers.split(",")
NAMESERVERS = setup_nameservers()
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = False
PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
FLASK_SECRET + "partnerapitoken"
)
JOB_MAX_ATTEMPTS = 5
JOB_TAKEN_RETRY_WAIT_MINS = 30
# MEM_STORE
MEM_STORE_URI = os.environ.get("MEM_STORE_URI", None)
# Recovery codes hash salt
RECOVERY_CODE_HMAC_SECRET = os.environ.get("RECOVERY_CODE_HMAC_SECRET") or (
FLASK_SECRET + "generatearandomtoken"
)
if not RECOVERY_CODE_HMAC_SECRET or len(RECOVERY_CODE_HMAC_SECRET) < 16:
raise RuntimeError(
"Please define RECOVERY_CODE_HMAC_SECRET in your configuration with a random string at least 16 chars long"
)
# the minimum rspamd spam score above which emails that fail DMARC should be quarantined
if "MIN_RSPAMD_SCORE_FOR_FAILED_DMARC" in os.environ:
MIN_RSPAMD_SCORE_FOR_FAILED_DMARC = float(
os.environ["MIN_RSPAMD_SCORE_FOR_FAILED_DMARC"]
)
else:
MIN_RSPAMD_SCORE_FOR_FAILED_DMARC = None
# run over all reverse aliases for an alias and replace them with the sender address
ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT = (
"ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT" in os.environ
)
if ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT:
# max number of reverse aliases that can be replaced
MAX_NB_REVERSE_ALIAS_REPLACEMENT = int(
os.environ["MAX_NB_REVERSE_ALIAS_REPLACEMENT"]
)
# Only used for tests
SKIP_MX_LOOKUP_ON_CHECK = False
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ

View File

@ -0,0 +1,37 @@
from app.db import Session
from app.dns_utils import get_cname_record
from app.models import CustomDomain
class CustomDomainValidation:
def __init__(self, dkim_domain: str):
self.dkim_domain = dkim_domain
self._dkim_records = {
(f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
for key in ("dkim", "dkim02", "dkim03")
}
def get_dkim_records(self) -> set[tuple[str, str]]:
"""
Get the DKIM records to set up, as (record prefix, expected CNAME target) pairs.
"""
return self._dkim_records
def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
"""
Check if the DKIM records are properly set for this custom domain.
Returns an empty dict if all records are OK. Otherwise returns the records that aren't properly configured
"""
invalid_records = {}
for prefix, expected_record in self.get_dkim_records():
custom_record = f"{prefix}.{custom_domain.domain}"
dkim_record = get_cname_record(custom_record)
if dkim_record != expected_record:
invalid_records[custom_record] = dkim_record or "empty"
# HACK: if DKIM is already verified, don't un-verify it, to give users time to update their CNAMEs
if custom_domain.dkim_verified:
return invalid_records
custom_domain.dkim_verified = len(invalid_records) == 0
Session.commit()
return invalid_records
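# Illustrative example (hypothetical domains, not part of the original file): assuming
# the instance's DKIM domain is "sl.example" and the custom domain is
# "mydomain.example", the CNAME records checked above are
#   dkim._domainkey.mydomain.example   -> dkim._domainkey.sl.example
#   dkim02._domainkey.mydomain.example -> dkim02._domainkey.sl.example
#   dkim03._domainkey.mydomain.example -> dkim03._domainkey.sl.example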

View File

@ -0,0 +1,35 @@
from .views import (
index,
pricing,
setting,
custom_alias,
subdomain,
billing,
alias_log,
alias_export,
unsubscribe,
api_key,
custom_domain,
alias_contact_manager,
enter_sudo,
mfa_setup,
mfa_cancel,
fido_setup,
coupon,
fido_manage,
domain_detail,
lifetime_licence,
directory,
mailbox,
mailbox_detail,
refused_email,
referral,
contact_detail,
setup_done,
batch_import,
alias_transfer,
app,
delete_account,
notification,
support,
)

View File

@ -0,0 +1,8 @@
from flask import Blueprint
dashboard_bp = Blueprint(
name="dashboard",
import_name=__name__,
url_prefix="/dashboard",
template_folder="templates",
)

View File

View File

@ -0,0 +1,332 @@
from dataclasses import dataclass
from operator import or_
from flask import render_template, request, redirect, flash
from flask import url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from sqlalchemy import and_, func, case
from wtforms import StringField, validators, ValidationError
# Need to import directly from config to allow modification from the tests
from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
is_valid_email,
generate_reply_email,
parse_full_address,
)
from app.errors import (
CannotCreateContactForReverseAlias,
ErrContactErrorUpgradeNeeded,
ErrAddressInvalid,
ErrContactAlreadyExists,
)
from app.log import LOG
from app.models import Alias, Contact, EmailLog, User
from app.utils import sanitize_email, CSRFValidationForm
def email_validator():
"""validate email address. Handle both only email and email with name:
- ab@cd.com
- AB CD <ab@cd.com>
"""
message = "Invalid email format. Email must be either email@example.com or *First Last <email@example.com>*"
def _check(form, field):
email = field.data
email = email.strip()
email_part = email
if "<" in email and ">" in email:
if email.find("<") + 1 < email.find(">"):
email_part = email[email.find("<") + 1 : email.find(">")].strip()
if not is_valid_email(email_part):
raise ValidationError(message)
return _check
def user_can_create_contacts(user: User) -> bool:
if user.is_premium():
return True
if user.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
"""
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
Can throw exceptions:
- ErrAddressInvalid
- ErrContactAlreadyExists
- ErrContactErrorUpgradeNeeded - raised for new free users when DISABLE_CREATE_CONTACTS_FOR_FREE_USERS is enabled
"""
if not contact_address:
raise ErrAddressInvalid("Empty address")
try:
contact_name, contact_email = parse_full_address(contact_address)
except ValueError:
raise ErrAddressInvalid(contact_address)
contact_email = sanitize_email(contact_email)
if not is_valid_email(contact_email):
raise ErrAddressInvalid(contact_email)
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
if contact:
raise ErrContactAlreadyExists(contact)
if not user_can_create_contacts(user):
raise ErrContactErrorUpgradeNeeded()
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
reply_email=generate_reply_email(contact_email, user),
)
LOG.d(
"create reverse-alias for %s %s, reverse alias:%s",
contact_address,
alias,
contact.reply_email,
)
Session.commit()
return contact
class NewContactForm(FlaskForm):
email = StringField(
"Email", validators=[validators.DataRequired(), email_validator()]
)
@dataclass
class ContactInfo(object):
contact: Contact
nb_forward: int
nb_reply: int
latest_email_log: EmailLog
def get_contact_infos(
alias: Alias, page=0, contact_id=None, query: str = ""
) -> [ContactInfo]:
"""if contact_id is set, only return the contact info for this contact"""
sub = (
Session.query(
Contact.id,
func.sum(case([(EmailLog.is_reply, 1)], else_=0)).label("nb_reply"),
func.sum(
case(
[
(
and_(
EmailLog.is_reply.is_(False),
EmailLog.blocked.is_(False),
),
1,
)
],
else_=0,
)
).label("nb_forward"),
func.max(EmailLog.created_at).label("max_email_log_created_at"),
)
.join(
EmailLog,
EmailLog.contact_id == Contact.id,
isouter=True,
)
.filter(Contact.alias_id == alias.id)
.group_by(Contact.id)
.subquery()
)
q = (
Session.query(
Contact,
EmailLog,
sub.c.nb_reply,
sub.c.nb_forward,
)
.join(
EmailLog,
EmailLog.contact_id == Contact.id,
isouter=True,
)
.filter(Contact.alias_id == alias.id)
.filter(Contact.id == sub.c.id)
.filter(
or_(
EmailLog.created_at == sub.c.max_email_log_created_at,
# no email log yet for this contact
sub.c.max_email_log_created_at.is_(None),
)
)
)
if query:
q = q.filter(
or_(
Contact.website_email.ilike(f"%{query}%"),
Contact.name.ilike(f"%{query}%"),
)
)
if contact_id:
q = q.filter(Contact.id == contact_id)
latest_activity = case(
[
(EmailLog.created_at > Contact.created_at, EmailLog.created_at),
(EmailLog.created_at < Contact.created_at, Contact.created_at),
],
else_=Contact.created_at,
)
q = (
q.order_by(latest_activity.desc())
.limit(config.PAGE_LIMIT)
.offset(page * config.PAGE_LIMIT)
)
ret = []
for contact, latest_email_log, nb_reply, nb_forward in q:
contact_info = ContactInfo(
contact=contact,
nb_forward=nb_forward,
nb_reply=nb_reply,
latest_email_log=latest_email_log,
)
ret.append(contact_info)
return ret
def delete_contact(alias: Alias, contact_id: int):
contact = Contact.get(contact_id)
if not contact:
flash("Unknown error. Refresh the page", "warning")
elif contact.alias_id != alias.id:
flash("You cannot delete reverse-alias", "warning")
else:
delete_contact_email = contact.website_email
Contact.delete(contact_id)
Session.commit()
flash(f"Reverse-alias for {delete_contact_email} has been deleted", "success")
@dashboard_bp.route("/alias_contact_manager/<int:alias_id>/", methods=["GET", "POST"])
@login_required
@parallel_limiter.lock(name="contact_creation")
def alias_contact_manager(alias_id):
highlight_contact_id = None
if request.args.get("highlight_contact_id"):
try:
highlight_contact_id = int(request.args.get("highlight_contact_id"))
except ValueError:
flash("Invalid contact id", "error")
return redirect(url_for("dashboard.index"))
alias = Alias.get(alias_id)
page = 0
if request.args.get("page"):
page = int(request.args.get("page"))
query = request.args.get("query") or ""
# sanity check
if not alias:
flash("You do not have access to this page", "warning")
return redirect(url_for("dashboard.index"))
if alias.user_id != current_user.id:
flash("You do not have access to this page", "warning")
return redirect(url_for("dashboard.index"))
new_contact_form = NewContactForm()
csrf_form = CSRFValidationForm()
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "create":
if new_contact_form.validate():
contact_address = new_contact_form.email.data.strip()
try:
contact = create_contact(current_user, alias, contact_address)
except (
ErrContactErrorUpgradeNeeded,
ErrAddressInvalid,
ErrContactAlreadyExists,
CannotCreateContactForReverseAlias,
) as excp:
flash(excp.error_for_user(), "error")
return redirect(request.url)
flash(f"Reverse alias for {contact_address} is created", "success")
return redirect(
url_for(
"dashboard.alias_contact_manager",
alias_id=alias_id,
highlight_contact_id=contact.id,
)
)
elif request.form.get("form-name") == "delete":
contact_id = request.form.get("contact-id")
delete_contact(alias, contact_id)
return redirect(
url_for("dashboard.alias_contact_manager", alias_id=alias_id)
)
elif request.form.get("form-name") == "search":
query = request.form.get("query")
return redirect(
url_for(
"dashboard.alias_contact_manager",
alias_id=alias_id,
query=query,
highlight_contact_id=highlight_contact_id,
)
)
contact_infos = get_contact_infos(alias, page, query=query)
last_page = len(contact_infos) < config.PAGE_LIMIT
nb_contact = Contact.filter(Contact.alias_id == alias.id).count()
# if highlighted contact isn't included, fetch it
# make sure highlighted contact is at array start
contact_ids = [contact_info.contact.id for contact_info in contact_infos]
if highlight_contact_id and highlight_contact_id not in contact_ids:
contact_infos = (
get_contact_infos(alias, contact_id=highlight_contact_id, query=query)
+ contact_infos
)
return render_template(
"dashboard/alias_contact_manager.html",
contact_infos=contact_infos,
alias=alias,
new_contact_form=new_contact_form,
highlight_contact_id=highlight_contact_id,
page=page,
last_page=last_page,
query=query,
nb_contact=nb_contact,
can_create_contacts=user_can_create_contacts(current_user),
csrf_form=csrf_form,
)

View File

@ -0,0 +1,9 @@
from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
def alias_export_route():
return alias_export_csv(current_user)

View File

@ -0,0 +1,92 @@
import arrow
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from app.config import PAGE_LIMIT
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Alias, EmailLog, Contact
class AliasLog:
website_email: str
reverse_alias: str
alias: str
when: arrow.Arrow
is_reply: bool
blocked: bool
bounced: bool
email_log: EmailLog
contact: Contact
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
@dashboard_bp.route(
"/alias_log/<int:alias_id>", methods=["GET"], defaults={"page_id": 0}
)
@dashboard_bp.route("/alias_log/<int:alias_id>/<int:page_id>")
@login_required
def alias_log(alias_id, page_id):
alias = Alias.get(alias_id)
# sanity check
if not alias:
flash("You do not have access to this page", "warning")
return redirect(url_for("dashboard.index"))
if alias.user_id != current_user.id:
flash("You do not have access to this page", "warning")
return redirect(url_for("dashboard.index"))
logs = get_alias_log(alias, page_id)
base = (
Session.query(Contact, EmailLog)
.filter(Contact.id == EmailLog.contact_id)
.filter(Contact.alias_id == alias.id)
)
total = base.count()
email_forwarded = (
base.filter(EmailLog.is_reply.is_(False))
.filter(EmailLog.blocked.is_(False))
.count()
)
email_replied = base.filter(EmailLog.is_reply.is_(True)).count()
email_blocked = base.filter(EmailLog.blocked.is_(True)).count()
last_page = (
len(logs) < PAGE_LIMIT
) # lightweight pagination without counting all objects
return render_template("dashboard/alias_log.html", **locals())
def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
logs: [AliasLog] = []
q = (
Session.query(Contact, EmailLog)
.filter(Contact.id == EmailLog.contact_id)
.filter(Contact.alias_id == alias.id)
.order_by(EmailLog.id.desc())
.limit(PAGE_LIMIT)
.offset(page_id * PAGE_LIMIT)
)
for contact, email_log in q:
al = AliasLog(
website_email=contact.website_email,
reverse_alias=contact.website_send_to(),
alias=alias.email,
when=email_log.created_at,
is_reply=email_log.is_reply,
blocked=email_log.blocked,
bounced=email_log.bounced,
email_log=email_log,
contact=contact,
)
logs.append(al)
logs = sorted(logs, key=lambda l: l.when, reverse=True)
return logs

View File

@ -0,0 +1,225 @@
import base64
import hmac
import secrets
import arrow
from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user
from app import config
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import send_email, render
from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
Contact,
AliasUsedOn,
AliasMailbox,
User,
ClientUser,
)
from app.models import Mailbox
from app.utils import CSRFValidationForm
def transfer(alias, new_user, new_mailboxes: [Mailbox]):
# cannot transfer an alias that is used for receiving the newsletter
if User.get_by(newsletter_alias_id=alias.id):
raise Exception("Cannot transfer alias that's used to receive newsletter")
# update user_id
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
{"user_id": new_user.id}
)
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
{"user_id": new_user.id}
)
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
{"user_id": new_user.id}
)
# remove existing mailboxes from the alias
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
# set mailboxes
alias.mailbox_id = new_mailboxes.pop().id
for mb in new_mailboxes:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
# alias has never been transferred before
if not alias.original_owner_id:
alias.original_owner_id = alias.user_id
# inform previous owner
old_user = alias.user
send_email(
old_user.email,
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
alias=alias,
),
render(
"transactional/alias-transferred.html",
alias=alias,
),
)
# now the alias belongs to the new user
alias.user_id = new_user.id
# set some fields back to default
alias.disable_pgp = False
alias.pinned = False
Session.commit()
def hmac_alias_transfer_token(transfer_token: str) -> str:
alias_hmac = hmac.new(
config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
transfer_token.encode("utf-8"),
"sha3_224",
)
return base64.urlsafe_b64encode(alias_hmac.digest()).decode("utf-8").rstrip("=")
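# Illustrative sketch (hypothetical token value, not part of the original file): the
# clear-text token handed to the user has the form f"{alias.id}.{secrets.token_urlsafe(32)}"
# (see the send route below). Only its HMAC is stored in alias.transfer_token; the
# receive route re-hashes the presented token with the same function before looking it
# up (with a temporary fallback to unhashed tokens, see the TODO there).
#   stored = hmac_alias_transfer_token("42.sometoken")
#   assert hmac_alias_transfer_token("42.sometoken") == stored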
@dashboard_bp.route("/alias_transfer/send/<int:alias_id>/", methods=["GET", "POST"])
@login_required
@sudo_required
def alias_transfer_send_route(alias_id):
alias = Alias.get(alias_id)
if not alias or alias.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
if current_user.newsletter_alias_id == alias.id:
flash(
"This alias is currently used for receiving the newsletter and cannot be transferred",
"error",
)
return redirect(url_for("dashboard.index"))
alias_transfer_url = None
csrf_form = CSRFValidationForm()
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
# generate a new transfer_token
if request.form.get("form-name") == "create":
transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
alias.transfer_token = hmac_alias_transfer_token(transfer_token)
alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)
Session.commit()
alias_transfer_url = (
config.URL
+ "/dashboard/alias_transfer/receive"
+ f"?token={transfer_token}"
)
flash("Share alias URL created", "success")
# request.form.get("form-name") == "remove"
else:
alias.transfer_token = None
alias.transfer_token_expiration = None
Session.commit()
alias_transfer_url = None
flash("Share URL deleted", "success")
return render_template(
"dashboard/alias_transfer_send.html",
alias=alias,
alias_transfer_url=alias_transfer_url,
link_active=alias.transfer_token_expiration is not None
and alias.transfer_token_expiration > arrow.utcnow(),
csrf_form=csrf_form,
)
@dashboard_bp.route("/alias_transfer/receive", methods=["GET", "POST"])
@limiter.limit("5/minute")
@login_required
def alias_transfer_receive_route():
"""
URL has ?token=transfer_token
"""
token = request.args.get("token")
if not token:
flash("Invalid transfer token", "error")
return redirect(url_for("dashboard.index"))
hashed_token = hmac_alias_transfer_token(token)
# TODO: Don't allow unhashed tokens once all the tokens have been migrated to the new format
alias = Alias.get_by(transfer_token=token) or Alias.get_by(
transfer_token=hashed_token
)
if not alias:
flash("Invalid link", "error")
return redirect(url_for("dashboard.index"))
# TODO: Don't allow none once all the tokens have been migrated to the new format
if (
alias.transfer_token_expiration is not None
and alias.transfer_token_expiration < arrow.utcnow()
):
flash("Expired link, please request a new one", "error")
return redirect(url_for("dashboard.index"))
# alias already belongs to this user
if alias.user_id == current_user.id:
flash("You already own this alias", "warning")
return redirect(url_for("dashboard.index"))
# check if user has not exceeded the alias quota
if not current_user.can_create_new_alias():
LOG.d("%s can't receive new alias", current_user)
flash(
"You have reached free plan limit, please upgrade to create new aliases",
"warning",
)
return redirect(url_for("dashboard.index"))
mailboxes = current_user.mailboxes()
if request.method == "POST":
mailbox_ids = request.form.getlist("mailbox_ids")
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(request.url)
mailboxes.append(mailbox)
if not mailboxes:
flash("You must select at least 1 mailbox", "warning")
return redirect(request.url)
LOG.d(
"transfer alias %s from %s to %s with %s with token %s",
alias,
alias.user,
current_user,
mailboxes,
token,
)
transfer(alias, current_user, mailboxes)
flash(f"You are now owner of {alias.email}", "success")
return redirect(url_for("dashboard.index", highlight_alias_id=alias.id))
return render_template(
"dashboard/alias_transfer_receive.html",
alias=alias,
mailboxes=mailboxes,
)
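The transfer link embeds a random token whose HMAC (keyed with ALIAS_TRANSFER_TOKEN_SECRET) is what gets stored on the alias, so a leaked database row does not yield a usable transfer link. Below is a minimal standalone sketch of the same scheme using only the standard library; the secret value and alias id are illustrative, not taken from the app.

import base64
import hmac
import secrets

SECRET = "alias-transfer-token-secret"  # stands in for config.ALIAS_TRANSFER_TOKEN_SECRET


def hash_token(transfer_token: str) -> str:
    # same construction as hmac_alias_transfer_token above: sha3_224 HMAC, base64url, no padding
    mac = hmac.new(SECRET.encode("utf-8"), transfer_token.encode("utf-8"), "sha3_224")
    return base64.urlsafe_b64encode(mac.digest()).decode("utf-8").rstrip("=")


# sender side: generate the token for the URL, store only its hash
alias_id = 42
transfer_token = f"{alias_id}.{secrets.token_urlsafe(32)}"
stored_hash = hash_token(transfer_token)

# receiver side: recompute the hash from the token found in the URL and compare
assert hash_token(transfer_token) == stored_hash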

View File

@ -0,0 +1,66 @@
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.models import ApiKey
class NewApiKeyForm(FlaskForm):
name = StringField("Name", validators=[validators.DataRequired()])
@dashboard_bp.route("/api_key", methods=["GET", "POST"])
@login_required
@sudo_required
def api_key():
api_keys = (
ApiKey.filter(ApiKey.user_id == current_user.id)
.order_by(ApiKey.created_at.desc())
.all()
)
new_api_key_form = NewApiKeyForm()
if request.method == "POST":
if request.form.get("form-name") == "delete":
api_key_id = request.form.get("api-key-id")
api_key = ApiKey.get(api_key_id)
if not api_key:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.api_key"))
elif api_key.user_id != current_user.id:
flash("You cannot delete this api key", "warning")
return redirect(url_for("dashboard.api_key"))
name = api_key.name
ApiKey.delete(api_key_id)
Session.commit()
flash(f"API Key {name} has been deleted", "success")
elif request.form.get("form-name") == "create":
if new_api_key_form.validate():
new_api_key = ApiKey.create(
name=new_api_key_form.name.data, user_id=current_user.id
)
Session.commit()
flash(f"New API Key {new_api_key.name} has been created", "success")
return render_template(
"dashboard/new_api_key.html", api_key=new_api_key
)
elif request.form.get("form-name") == "delete-all":
ApiKey.delete_all(current_user.id)
Session.commit()
flash("All API Keys have been deleted", "success")
return redirect(url_for("dashboard.api_key"))
return render_template(
"dashboard/api_key.html", api_keys=api_keys, new_api_key_form=new_api_key_form
)

View File

@ -0,0 +1,48 @@
from app.db import Session
"""
List of apps that user has used via the "Sign in with SimpleLogin"
"""
from flask import render_template, request, flash, redirect
from flask_login import login_required, current_user
from sqlalchemy.orm import joinedload
from app.dashboard.base import dashboard_bp
from app.models import (
ClientUser,
)
@dashboard_bp.route("/app", methods=["GET", "POST"])
@login_required
def app_route():
client_users = (
ClientUser.filter_by(user_id=current_user.id)
.options(joinedload(ClientUser.client))
.options(joinedload(ClientUser.alias))
.all()
)
client_users = sorted(client_users, key=lambda cu: cu.client.name)
if request.method == "POST":
client_user_id = request.form.get("client-user-id")
client_user = ClientUser.get(client_user_id)
if not client_user or client_user.user_id != current_user.id:
flash(
"Unknown error, sorry for the inconvenience, refresh the page", "error"
)
return redirect(request.url)
client = client_user.client
ClientUser.delete(client_user_id)
Session.commit()
flash(f"Link with {client.name} has been removed", "success")
return redirect(request.url)
return render_template(
"dashboard/app.html",
client_users=client_users,
)

View File

@ -0,0 +1,78 @@
import arrow
from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required, current_user
from app import s3
from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.log import LOG
from app.models import File, BatchImport, Job
from app.utils import random_string, CSRFValidationForm
@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
@login_required
def batch_import_route():
# only for users who have custom domains
if not current_user.verified_custom_domains():
flash("Alias batch import is only available for custom domains", "warning")
if current_user.disable_import:
flash(
"you cannot use the import feature, please contact SimpleLogin team",
"error",
)
return redirect(url_for("dashboard.index"))
batch_imports = BatchImport.filter_by(
user_id=current_user.id, processed=False
).all()
csrf_form = CSRFValidationForm()
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if len(batch_imports) > 10:
flash(
"You have too many imports already. Wait until some get cleaned up",
"error",
)
return render_template(
"dashboard/batch_import.html",
batch_imports=batch_imports,
csrf_form=csrf_form,
)
alias_file = request.files["alias-file"]
file_path = random_string(20) + ".csv"
file = File.create(user_id=current_user.id, path=file_path)
s3.upload_from_bytesio(file_path, alias_file)
Session.flush()
LOG.d("upload file %s to s3 at %s", file, file_path)
bi = BatchImport.create(user_id=current_user.id, file_id=file.id)
Session.flush()
LOG.d("Add a batch import job %s for %s", bi, current_user)
# Schedule batch import job
Job.create(
name=JOB_BATCH_IMPORT,
payload={"batch_import_id": bi.id},
run_at=arrow.now(),
)
Session.commit()
flash(
"The file has been uploaded successfully and the import will start shortly",
"success",
)
return redirect(url_for("dashboard.batch_import_route"))
return render_template(
"dashboard/batch_import.html", batch_imports=batch_imports, csrf_form=csrf_form
)

View File

@ -0,0 +1,82 @@
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_required, current_user
from app.config import PADDLE_MONTHLY_PRODUCT_ID, PADDLE_YEARLY_PRODUCT_ID
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.log import LOG
from app.models import Subscription, PlanEnum
from app.paddle_utils import cancel_subscription, change_plan
@dashboard_bp.route("/billing", methods=["GET", "POST"])
@login_required
def billing():
# sanity check: this page is only for users who have a Paddle subscription
sub: Subscription = current_user.get_paddle_subscription()
if not sub:
flash("You don't have any active subscription", "warning")
return redirect(url_for("dashboard.index"))
if request.method == "POST":
if request.form.get("form-name") == "cancel":
LOG.w(f"User {current_user} cancels their subscription")
success = cancel_subscription(sub.subscription_id)
if success:
sub.cancelled = True
Session.commit()
flash("Your subscription has been canceled successfully", "success")
else:
flash(
"Something went wrong, sorry for the inconvenience. Please retry. "
"We are already notified and will be on it asap",
"error",
)
return redirect(url_for("dashboard.billing"))
elif request.form.get("form-name") == "change-monthly":
LOG.d(f"User {current_user} changes to monthly plan")
success, msg = change_plan(
current_user, sub.subscription_id, PADDLE_MONTHLY_PRODUCT_ID
)
if success:
sub.plan = PlanEnum.monthly
Session.commit()
flash("Your subscription has been updated", "success")
else:
if msg:
flash(msg, "error")
else:
flash(
"Something went wrong, sorry for the inconvenience. Please retry. "
"We are already notified and will be on it asap",
"error",
)
return redirect(url_for("dashboard.billing"))
elif request.form.get("form-name") == "change-yearly":
LOG.d(f"User {current_user} changes to yearly plan")
success, msg = change_plan(
current_user, sub.subscription_id, PADDLE_YEARLY_PRODUCT_ID
)
if success:
sub.plan = PlanEnum.yearly
Session.commit()
flash("Your subscription has been updated", "success")
else:
if msg:
flash(msg, "error")
else:
flash(
"Something went wrong, sorry for the inconvenience. Please retry. "
"We are already notified and will be on it asap",
"error",
)
return redirect(url_for("dashboard.billing"))
return render_template("dashboard/billing.html", sub=sub, PlanEnum=PlanEnum)

View File

@ -0,0 +1,75 @@
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Contact
from app.pgp_utils import PGPException, load_public_key_and_check
class PGPContactForm(FlaskForm):
action = StringField(
"action",
validators=[validators.DataRequired(), validators.AnyOf(("save", "remove"))],
)
pgp = StringField("pgp", validators=[validators.Optional()])
@dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
@login_required
def contact_detail_route(contact_id):
contact = Contact.get(contact_id)
if not contact or contact.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
alias = contact.alias
pgp_form = PGPContactForm()
if request.method == "POST":
if request.form.get("form-name") == "pgp":
if not pgp_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if pgp_form.action.data == "save":
if not current_user.is_premium():
flash("Only premium plan can add PGP Key", "warning")
return redirect(
url_for("dashboard.contact_detail_route", contact_id=contact_id)
)
if not pgp_form.pgp.data:
flash("Invalid pgp key")
else:
contact.pgp_public_key = pgp_form.pgp.data
try:
contact.pgp_finger_print = load_public_key_and_check(
contact.pgp_public_key
)
except PGPException:
flash("Cannot add the public key, please verify it", "error")
else:
Session.commit()
flash(
f"PGP public key for {contact.email} is saved successfully",
"success",
)
return redirect(
url_for(
"dashboard.contact_detail_route", contact_id=contact_id
)
)
elif pgp_form.action.data == "remove":
# Free user can decide to remove contact PGP key
contact.pgp_public_key = None
contact.pgp_finger_print = None
Session.commit()
flash(f"PGP public key for {contact.email} is removed", "success")
return redirect(
url_for("dashboard.contact_detail_route", contact_id=contact_id)
)
return render_template(
"dashboard/contact_detail.html", contact=contact, alias=alias, pgp_form=pgp_form
)

View File

@ -0,0 +1,116 @@
import arrow
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app import parallel_limiter
from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.log import LOG
from app.models import (
ManualSubscription,
Coupon,
Subscription,
AppleSubscription,
CoinbaseSubscription,
LifetimeCoupon,
)
class CouponForm(FlaskForm):
code = StringField("Coupon Code", validators=[validators.DataRequired()])
@dashboard_bp.route("/coupon", methods=["GET", "POST"])
@login_required
@parallel_limiter.lock()
def coupon_route():
coupon_form = CouponForm()
if coupon_form.validate_on_submit():
code = coupon_form.code.data
if LifetimeCoupon.get_by(code=code):
LOG.d("redirect %s to lifetime page instead", current_user)
flash("Redirect to the lifetime coupon page instead", "success")
return redirect(url_for("dashboard.lifetime_licence"))
# handle case user already has an active subscription via another channel (Paddle, Apple, etc)
can_use_coupon = True
if current_user.lifetime:
can_use_coupon = False
sub: Subscription = current_user.get_paddle_subscription()
if sub:
can_use_coupon = False
apple_sub: AppleSubscription = AppleSubscription.get_by(user_id=current_user.id)
if apple_sub and apple_sub.is_valid():
can_use_coupon = False
coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
user_id=current_user.id
)
if coinbase_subscription and coinbase_subscription.is_active():
can_use_coupon = False
if coupon_form.validate_on_submit():
code = coupon_form.code.data
coupon: Coupon = Coupon.get_by(code=code)
if coupon and not coupon.used:
if coupon.expires_date and coupon.expires_date < arrow.now():
flash(
f"The coupon was expired on {coupon.expires_date.humanize()}",
"error",
)
return redirect(request.url)
coupon.used_by_user_id = current_user.id
coupon.used = True
Session.commit()
manual_sub: ManualSubscription = ManualSubscription.get_by(
user_id=current_user.id
)
if manual_sub:
# renew existing subscription
if manual_sub.end_at > arrow.now():
manual_sub.end_at = manual_sub.end_at.shift(years=coupon.nb_year)
else:
manual_sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
Session.commit()
flash(
f"Your current subscription is extended to {manual_sub.end_at.humanize()}",
"success",
)
else:
ManualSubscription.create(
user_id=current_user.id,
end_at=arrow.now().shift(years=coupon.nb_year, days=1),
comment="using coupon code",
is_giveaway=coupon.is_giveaway,
commit=True,
)
flash(
f"Your account has been upgraded to Premium, thanks for your support!",
"success",
)
return redirect(url_for("dashboard.index"))
else:
flash(f"Code *{code}* expired or invalid", "warning")
return render_template(
"dashboard/coupon.html",
coupon_form=coupon_form,
PADDLE_VENDOR_ID=PADDLE_VENDOR_ID,
PADDLE_COUPON_ID=PADDLE_COUPON_ID,
can_use_coupon=can_use_coupon,
# a coupon is only valid until this date
# this is to avoid using the coupon to renew an account forever
max_coupon_date=arrow.now().shift(years=1, days=-1),
)
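The renewal logic above extends an existing manual subscription from its current end date when it is still active, and otherwise starts a fresh period from today plus a one-day grace. A small sketch of that date arithmetic with arrow; the helper name is illustrative, not part of the app.

import arrow


def extended_end_at(current_end_at, nb_year: int):
    # still active: extend from the current end date
    if current_end_at and current_end_at > arrow.now():
        return current_end_at.shift(years=nb_year)
    # expired (or no subscription yet): start from now, plus a one-day grace period
    return arrow.now().shift(years=nb_year, days=1)


print(extended_end_at(arrow.now().shift(months=2), nb_year=1).humanize())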

View File

@ -0,0 +1,174 @@
from email_validator import validate_email, EmailNotValidError
from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user
from sqlalchemy.exc import IntegrityError
from app import parallel_limiter
from app.alias_suffix import (
get_alias_suffixes,
check_suffix_signature,
verify_prefix_suffix,
)
from app.alias_utils import check_alias_prefix
from app.config import (
ALIAS_LIMIT,
)
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
DeletedAlias,
Mailbox,
AliasMailbox,
DomainDeletedAlias,
)
@dashboard_bp.route("/custom_alias", methods=["GET", "POST"])
@limiter.limit(ALIAS_LIMIT, methods=["POST"])
@login_required
@parallel_limiter.lock(name="alias_creation")
def custom_alias():
# check if user has not exceeded the alias quota
if not current_user.can_create_new_alias():
LOG.d("%s can't create new alias", current_user)
flash(
"You have reached free plan limit, please upgrade to create new aliases",
"warning",
)
return redirect(url_for("dashboard.index"))
user_custom_domains = [cd.domain for cd in current_user.verified_custom_domains()]
alias_suffixes = get_alias_suffixes(current_user)
at_least_a_premium_domain = False
for alias_suffix in alias_suffixes:
if not alias_suffix.is_custom and alias_suffix.is_premium:
at_least_a_premium_domain = True
break
mailboxes = current_user.mailboxes()
if request.method == "POST":
alias_prefix = request.form.get("prefix").strip().lower().replace(" ", "")
signed_alias_suffix = request.form.get("signed-alias-suffix")
mailbox_ids = request.form.getlist("mailboxes")
alias_note = request.form.get("note")
if not check_alias_prefix(alias_prefix):
flash(
"Only lowercase letters, numbers, dashes (-), dots (.) and underscores (_) "
"are currently supported for alias prefix. Cannot be more than 40 letters",
"error",
)
return redirect(request.url)
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(request.url)
mailboxes.append(mailbox)
if not mailboxes:
flash("At least one mailbox must be selected", "error")
return redirect(request.url)
try:
suffix = check_suffix_signature(signed_alias_suffix)
if not suffix:
LOG.w("Alias creation time expired for %s", current_user)
flash("Alias creation time is expired, please retry", "warning")
return redirect(request.url)
except Exception:
LOG.w("Alias suffix is tampered, user %s", current_user)
flash("Unknown error, refresh the page", "error")
return redirect(request.url)
if verify_prefix_suffix(current_user, alias_prefix, suffix):
full_alias = alias_prefix + suffix
if ".." in full_alias:
flash("Your alias can't contain 2 consecutive dots (..)", "error")
return redirect(request.url)
try:
validate_email(
full_alias, check_deliverability=False, allow_smtputf8=False
)
except EmailNotValidError as e:
flash(str(e), "error")
return redirect(request.url)
general_error_msg = f"{full_alias} cannot be used"
if Alias.get_by(email=full_alias):
alias = Alias.get_by(email=full_alias)
if alias.user_id == current_user.id:
flash(f"You already have this alias {full_alias}", "error")
else:
flash(general_error_msg, "error")
elif DomainDeletedAlias.get_by(email=full_alias):
domain_deleted_alias: DomainDeletedAlias = DomainDeletedAlias.get_by(
email=full_alias
)
custom_domain = domain_deleted_alias.domain
if domain_deleted_alias.user_id == current_user.id:
flash(
f"You have deleted this alias before. You can restore it on "
f"{custom_domain.domain} 'Deleted Alias' page",
"error",
)
else:
# should never happen as user can only choose their domains
LOG.e(
"Deleted Alias %s does not belong to user %s",
domain_deleted_alias,
current_user,
)
elif DeletedAlias.get_by(email=full_alias):
flash(general_error_msg, "error")
else:
try:
alias = Alias.create(
user_id=current_user.id,
email=full_alias,
note=alias_note,
mailbox_id=mailboxes[0].id,
)
Session.flush()
except IntegrityError:
LOG.w("Alias %s already exists", full_alias)
Session.rollback()
flash("Unknown error, please retry", "error")
return redirect(url_for("dashboard.custom_alias"))
for i in range(1, len(mailboxes)):
AliasMailbox.create(
alias_id=alias.id,
mailbox_id=mailboxes[i].id,
)
Session.commit()
flash(f"Alias {full_alias} has been created", "success")
return redirect(url_for("dashboard.index", highlight_alias_id=alias.id))
# only happen if the request has been "hacked"
else:
flash("something went wrong", "warning")
return render_template(
"dashboard/custom_alias.html",
user_custom_domains=user_custom_domains,
alias_suffixes=alias_suffixes,
at_least_a_premium_domain=at_least_a_premium_domain,
mailboxes=mailboxes,
)
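Before an alias is created, the user-chosen prefix and the server-signed suffix are concatenated and the result is validated with email_validator, with deliverability checks disabled. A minimal sketch of that check; the prefix and suffix values here are illustrative (real suffixes are signed server-side).

from email_validator import EmailNotValidError, validate_email

alias_prefix = "newsletter"
suffix = ".abcd@example.com"  # illustrative only
full_alias = alias_prefix + suffix

try:
    # same flags as in custom_alias above: syntax check only, no DNS lookup, no SMTPUTF8
    validate_email(full_alias, check_deliverability=False, allow_smtputf8=False)
except EmailNotValidError as e:
    print(f"rejected: {e}")
else:
    print(f"{full_alias} looks like a valid alias address")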

View File

@ -0,0 +1,121 @@
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.config import EMAIL_SERVERS_WITH_PRIORITY
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
class NewCustomDomainForm(FlaskForm):
domain = StringField(
"domain", validators=[validators.DataRequired(), validators.Length(max=128)]
)
@dashboard_bp.route("/custom_domain", methods=["GET", "POST"])
@login_required
def custom_domain():
custom_domains = CustomDomain.filter_by(
user_id=current_user.id, is_sl_subdomain=False
).all()
mailboxes = current_user.mailboxes()
new_custom_domain_form = NewCustomDomainForm()
errors = {}
if request.method == "POST":
if request.form.get("form-name") == "create":
if not current_user.is_premium():
flash("Only premium plan can add custom domain", "warning")
return redirect(url_for("dashboard.custom_domain"))
if new_custom_domain_form.validate():
new_domain = new_custom_domain_form.domain.data.lower().strip()
if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]
if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]
if SLDomain.get_by(domain=new_domain):
flash("A custom domain cannot be a built-in domain.", "error")
elif CustomDomain.get_by(domain=new_domain):
flash(f"{new_domain} already used", "error")
elif get_email_domain_part(current_user.email) == new_domain:
flash(
"You cannot add a domain that you are currently using for your personal email. "
"Please change your personal email to your real email",
"error",
)
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
).first():
flash(
f"{new_domain} already used in a SimpleLogin mailbox", "error"
)
else:
new_custom_domain = CustomDomain.create(
domain=new_domain, user_id=current_user.id
)
# the new domain inherits ownership verification if one of its parent domains is already ownership-verified
for root_cd in current_user.custom_domains:
if (
new_domain.endswith("." + root_cd.domain)
and root_cd.ownership_verified
):
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True
Session.commit()
mailbox_ids = request.form.getlist("mailbox_ids")
if mailbox_ids:
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(url_for("dashboard.custom_domain"))
mailboxes.append(mailbox)
for mailbox in mailboxes:
DomainMailbox.create(
domain_id=new_custom_domain.id, mailbox_id=mailbox.id
)
Session.commit()
flash(
f"New domain {new_custom_domain.domain} is created", "success"
)
return redirect(
url_for(
"dashboard.domain_detail_dns",
custom_domain_id=new_custom_domain.id,
)
)
return render_template(
"dashboard/custom_domain.html",
custom_domains=custom_domains,
new_custom_domain_form=new_custom_domain_form,
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
errors=errors,
mailboxes=mailboxes,
)
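When a subdomain of an already ownership-verified domain is added, the ownership check is skipped; the test is a plain suffix match on the dotted parent domain, applied after the entered value is normalized. A small standalone sketch of that normalization and inheritance test; the domain names and helper names are illustrative.

def normalize_domain(raw: str) -> str:
    # lowercase, trim, and drop a pasted-in scheme, as custom_domain does above
    domain = raw.lower().strip()
    for scheme in ("http://", "https://"):
        if domain.startswith(scheme):
            domain = domain[len(scheme):]
    return domain


def inherits_ownership(new_domain: str, verified_domains: list[str]) -> bool:
    return any(new_domain.endswith("." + parent) for parent in verified_domains)


new_domain = normalize_domain("https://Mail.Example.com")
print(new_domain)                                       # mail.example.com
print(inherits_ownership(new_domain, ["example.com"]))  # True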

View File

@ -0,0 +1,50 @@
import arrow
from flask import flash, redirect, url_for, request, render_template
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from app.config import JOB_DELETE_ACCOUNT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.log import LOG
from app.models import Subscription, Job
class DeleteDirForm(FlaskForm):
pass
@dashboard_bp.route("/delete_account", methods=["GET", "POST"])
@login_required
@sudo_required
def delete_account():
delete_form = DeleteDirForm()
if request.method == "POST" and request.form.get("form-name") == "delete-account":
if not delete_form.validate():
flash("Invalid request", "warning")
return render_template(
"dashboard/delete_account.html", delete_form=delete_form
)
sub: Subscription = current_user.get_paddle_subscription()
# user who has canceled can also re-subscribe
if sub and not sub.cancelled:
flash("Please cancel your current subscription first", "warning")
return redirect(url_for("dashboard.setting"))
# Schedule delete account job
LOG.w("schedule delete account job for %s", current_user)
Job.create(
name=JOB_DELETE_ACCOUNT,
payload={"user_id": current_user.id},
run_at=arrow.now(),
commit=True,
)
flash(
"Your account deletion has been scheduled. "
"You'll receive an email when the deletion is finished",
"info",
)
return redirect(url_for("dashboard.setting"))
return render_template("dashboard/delete_account.html", delete_form=delete_form)

View File

@ -0,0 +1,227 @@
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import (
StringField,
validators,
SelectMultipleField,
BooleanField,
IntegerField,
)
from app.config import (
EMAIL_DOMAIN,
ALIAS_DOMAINS,
MAX_NB_DIRECTORY,
BOUNCE_PREFIX_FOR_REPLY_PHASE,
)
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.errors import DirectoryInTrashError
from app.models import Directory, Mailbox, DirectoryMailbox
class NewDirForm(FlaskForm):
name = StringField(
"name", validators=[validators.DataRequired(), validators.Length(min=3)]
)
class ToggleDirForm(FlaskForm):
directory_id = IntegerField(validators=[validators.DataRequired()])
directory_enabled = BooleanField(validators=[])
class UpdateDirForm(FlaskForm):
directory_id = IntegerField(validators=[validators.DataRequired()])
mailbox_ids = SelectMultipleField(
validators=[validators.DataRequired()], validate_choice=False, choices=[]
)
class DeleteDirForm(FlaskForm):
directory_id = IntegerField(validators=[validators.DataRequired()])
@dashboard_bp.route("/directory", methods=["GET", "POST"])
@login_required
def directory():
dirs = (
Directory.filter_by(user_id=current_user.id)
.order_by(Directory.created_at.desc())
.all()
)
mailboxes = current_user.mailboxes()
new_dir_form = NewDirForm()
toggle_dir_form = ToggleDirForm()
update_dir_form = UpdateDirForm()
update_dir_form.mailbox_ids.choices = [
(str(mailbox.id), str(mailbox.id)) for mailbox in mailboxes
]
delete_dir_form = DeleteDirForm()
if request.method == "POST":
if request.form.get("form-name") == "delete":
if not delete_dir_form.validate():
flash(f"Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_obj = Directory.get(delete_dir_form.directory_id.data)
if not dir_obj:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.directory"))
elif dir_obj.user_id != current_user.id:
flash("You cannot delete this directory", "warning")
return redirect(url_for("dashboard.directory"))
name = dir_obj.name
Directory.delete(dir_obj.id)
Session.commit()
flash(f"Directory {name} has been deleted", "success")
return redirect(url_for("dashboard.directory"))
if request.form.get("form-name") == "toggle-directory":
if not toggle_dir_form.validate():
flash(f"Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = toggle_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)
if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.directory"))
if toggle_dir_form.directory_enabled.data:
dir_obj.disabled = False
flash(f"On-the-fly is enabled for {dir_obj.name}", "success")
else:
dir_obj.disabled = True
flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")
Session.commit()
return redirect(url_for("dashboard.directory"))
elif request.form.get("form-name") == "update":
if not update_dir_form.validate():
flash(f"Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = update_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)
if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.directory"))
mailbox_ids = update_dir_form.mailbox_ids.data
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(url_for("dashboard.directory"))
mailboxes.append(mailbox)
if not mailboxes:
flash("You must select at least 1 mailbox", "warning")
return redirect(url_for("dashboard.directory"))
# first remove all existing directory-mailboxes links
DirectoryMailbox.filter_by(directory_id=dir_obj.id).delete()
Session.flush()
for mailbox in mailboxes:
DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)
Session.commit()
flash(f"Directory {dir_obj.name} has been updated", "success")
return redirect(url_for("dashboard.directory"))
elif request.form.get("form-name") == "create":
if not current_user.is_premium():
flash("Only premium plan can add directory", "warning")
return redirect(url_for("dashboard.directory"))
if current_user.directory_quota <= 0:
flash(
f"You cannot have more than {MAX_NB_DIRECTORY} directories",
"warning",
)
return redirect(url_for("dashboard.directory"))
if new_dir_form.validate():
new_dir_name = new_dir_form.name.data.lower()
if Directory.get_by(name=new_dir_name):
flash(f"{new_dir_name} already used", "warning")
elif new_dir_name in (
"reply",
"ra",
"bounces",
"bounce",
"transactional",
BOUNCE_PREFIX_FOR_REPLY_PHASE,
):
flash(
"this directory name is reserved, please choose another name",
"warning",
)
else:
try:
new_dir = Directory.create(
name=new_dir_name, user_id=current_user.id
)
except DirectoryInTrashError:
flash(
f"{new_dir_name} has been used before and cannot be reused",
"error",
)
else:
Session.commit()
mailbox_ids = request.form.getlist("mailbox_ids")
if mailbox_ids:
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash(
"Something went wrong, please retry", "warning"
)
return redirect(url_for("dashboard.directory"))
mailboxes.append(mailbox)
for mailbox in mailboxes:
DirectoryMailbox.create(
directory_id=new_dir.id, mailbox_id=mailbox.id
)
Session.commit()
flash(f"Directory {new_dir.name} is created", "success")
return redirect(url_for("dashboard.directory"))
return render_template(
"dashboard/directory.html",
dirs=dirs,
toggle_dir_form=toggle_dir_form,
update_dir_form=update_dir_form,
delete_dir_form=delete_dir_form,
new_dir_form=new_dir_form,
mailboxes=mailboxes,
EMAIL_DOMAIN=EMAIL_DOMAIN,
ALIAS_DOMAINS=ALIAS_DOMAINS,
)

View File

@ -0,0 +1,528 @@
import re
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators, IntegerField
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
from app.custom_domain_validation import CustomDomainValidation
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.dns_utils import (
get_mx_domains,
get_spf_domain,
get_txt_record,
is_mx_equivalent,
)
from app.log import LOG
from app.models import (
CustomDomain,
Alias,
DomainDeletedAlias,
Mailbox,
DomainMailbox,
AutoCreateRule,
AutoCreateRuleMailbox,
Job,
)
from app.regex_utils import regex_match
from app.utils import random_string, CSRFValidationForm
@dashboard_bp.route("/domains/<int:custom_domain_id>/dns", methods=["GET", "POST"])
@login_required
def domain_detail_dns(custom_domain_id):
custom_domain: CustomDomain = CustomDomain.get(custom_domain_id)
if not custom_domain or custom_domain.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
# generate a domain ownership txt token if needed
if not custom_domain.ownership_verified and not custom_domain.ownership_txt_token:
custom_domain.ownership_txt_token = random_string(30)
Session.commit()
spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
csrf_form = CSRFValidationForm()
dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "check-ownership":
txt_records = get_txt_record(custom_domain.domain)
if custom_domain.get_ownership_dns_txt_value() in txt_records:
flash(
"Domain ownership is verified. Please proceed to the other records setup",
"success",
)
custom_domain.ownership_verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns",
custom_domain_id=custom_domain.id,
_anchor="dns-setup",
)
)
else:
flash("We can't find the needed TXT record", "error")
ownership_ok = False
ownership_errors = txt_records
elif request.form.get("form-name") == "check-mx":
mx_domains = get_mx_domains(custom_domain.domain)
if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
flash("The MX record is not correctly set", "warning")
mx_ok = False
# build mx_errors to show to user
mx_errors = [
f"{priority} {domain}" for (priority, domain) in mx_domains
]
else:
flash(
"Your domain can start receiving emails. You can now use it to create alias",
"success",
)
custom_domain.verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
elif request.form.get("form-name") == "check-spf":
spf_domains = get_spf_domain(custom_domain.domain)
if EMAIL_DOMAIN in spf_domains:
custom_domain.spf_verified = True
Session.commit()
flash("SPF is setup correctly", "success")
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
else:
custom_domain.spf_verified = False
Session.commit()
flash(
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
"warning",
)
spf_ok = False
spf_errors = get_txt_record(custom_domain.domain)
elif request.form.get("form-name") == "check-dkim":
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
if len(dkim_errors) == 0:
flash("DKIM is setup correctly.", "success")
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
else:
dkim_ok = False
flash("DKIM: the CNAME record is not correctly set", "warning")
elif request.form.get("form-name") == "check-dmarc":
txt_records = get_txt_record("_dmarc." + custom_domain.domain)
if dmarc_record in txt_records:
custom_domain.dmarc_verified = True
Session.commit()
flash("DMARC is setup correctly", "success")
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
else:
custom_domain.dmarc_verified = False
Session.commit()
flash(
"DMARC: The TXT record is not correctly set",
"warning",
)
dmarc_ok = False
dmarc_errors = txt_records
return render_template(
"dashboard/domain_detail/dns.html",
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
dkim_records=domain_validator.get_dkim_records(),
**locals(),
)
@dashboard_bp.route("/domains/<int:custom_domain_id>/info", methods=["GET", "POST"])
@login_required
def domain_detail(custom_domain_id):
csrf_form = CSRFValidationForm()
custom_domain: CustomDomain = CustomDomain.get(custom_domain_id)
mailboxes = current_user.mailboxes()
if not custom_domain or custom_domain.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "switch-catch-all":
custom_domain.catch_all = not custom_domain.catch_all
Session.commit()
if custom_domain.catch_all:
flash(
f"The catch-all has been enabled for {custom_domain.domain}",
"success",
)
else:
flash(
f"The catch-all has been disabled for {custom_domain.domain}",
"warning",
)
return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
)
elif request.form.get("form-name") == "set-name":
if request.form.get("action") == "save":
custom_domain.name = request.form.get("alias-name").replace("\n", "")
Session.commit()
flash(
f"Default alias name for Domain {custom_domain.domain} has been set",
"success",
)
else:
custom_domain.name = None
Session.commit()
flash(
f"Default alias name for Domain {custom_domain.domain} has been removed",
"info",
)
return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
)
elif request.form.get("form-name") == "switch-random-prefix-generation":
custom_domain.random_prefix_generation = (
not custom_domain.random_prefix_generation
)
Session.commit()
if custom_domain.random_prefix_generation:
flash(
f"Random prefix generation has been enabled for {custom_domain.domain}",
"success",
)
else:
flash(
f"Random prefix generation has been disabled for {custom_domain.domain}",
"warning",
)
return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
)
elif request.form.get("form-name") == "update":
mailbox_ids = request.form.getlist("mailbox_ids")
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
mailboxes.append(mailbox)
if not mailboxes:
flash("You must select at least 1 mailbox", "warning")
return redirect(
url_for(
"dashboard.domain_detail", custom_domain_id=custom_domain.id
)
)
# first remove all existing domain-mailboxes links
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
Session.flush()
for mailbox in mailboxes:
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
Session.commit()
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
return redirect(
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
)
elif request.form.get("form-name") == "delete":
name = custom_domain.domain
LOG.d("Schedule deleting %s", custom_domain)
# Schedule delete domain job
LOG.w("schedule delete domain job for %s", custom_domain)
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": custom_domain.id},
run_at=arrow.now(),
commit=True,
)
flash(
f"{name} scheduled for deletion."
f"You will receive a confirmation email when the deletion is finished",
"success",
)
if custom_domain.is_sl_subdomain:
return redirect(url_for("dashboard.subdomain_route"))
else:
return redirect(url_for("dashboard.custom_domain"))
nb_alias = Alias.filter_by(custom_domain_id=custom_domain.id).count()
return render_template("dashboard/domain_detail/info.html", **locals())
@dashboard_bp.route("/domains/<int:custom_domain_id>/trash", methods=["GET", "POST"])
@login_required
def domain_detail_trash(custom_domain_id):
csrf_form = CSRFValidationForm()
custom_domain = CustomDomain.get(custom_domain_id)
if not custom_domain or custom_domain.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "empty-all":
DomainDeletedAlias.filter_by(domain_id=custom_domain.id).delete()
Session.commit()
flash("All deleted aliases can now be re-created", "success")
return redirect(
url_for(
"dashboard.domain_detail_trash", custom_domain_id=custom_domain.id
)
)
elif request.form.get("form-name") == "remove-single":
deleted_alias_id = request.form.get("deleted-alias-id")
deleted_alias = DomainDeletedAlias.get(deleted_alias_id)
if not deleted_alias or deleted_alias.domain_id != custom_domain.id:
flash("Unknown error, refresh the page", "warning")
return redirect(
url_for(
"dashboard.domain_detail_trash",
custom_domain_id=custom_domain.id,
)
)
DomainDeletedAlias.delete(deleted_alias.id)
Session.commit()
flash(
f"{deleted_alias.email} can now be re-created",
"success",
)
return redirect(
url_for(
"dashboard.domain_detail_trash", custom_domain_id=custom_domain.id
)
)
domain_deleted_aliases = DomainDeletedAlias.filter_by(
domain_id=custom_domain.id
).all()
return render_template(
"dashboard/domain_detail/trash.html",
domain_deleted_aliases=domain_deleted_aliases,
custom_domain=custom_domain,
csrf_form=csrf_form,
)
class AutoCreateRuleForm(FlaskForm):
regex = StringField(
"regex", validators=[validators.DataRequired(), validators.Length(max=128)]
)
order = IntegerField(
"order",
validators=[validators.DataRequired(), validators.NumberRange(min=0, max=100)],
)
class AutoCreateTestForm(FlaskForm):
local = StringField(
"local part", validators=[validators.DataRequired(), validators.Length(max=128)]
)
@dashboard_bp.route(
"/domains/<int:custom_domain_id>/auto-create", methods=["GET", "POST"]
)
@login_required
def domain_detail_auto_create(custom_domain_id):
custom_domain: CustomDomain = CustomDomain.get(custom_domain_id)
mailboxes = current_user.mailboxes()
new_auto_create_rule_form = AutoCreateRuleForm()
auto_create_test_form = AutoCreateTestForm()
auto_create_test_local, auto_create_test_result, auto_create_test_passed = (
"",
"",
False,
)
if not custom_domain or custom_domain.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
if request.method == "POST":
if request.form.get("form-name") == "create-auto-create-rule":
if new_auto_create_rule_form.validate():
# make sure order isn't used before
for auto_create_rule in custom_domain.auto_create_rules:
auto_create_rule: AutoCreateRule
if auto_create_rule.order == int(
new_auto_create_rule_form.order.data
):
flash(
"Another rule with the same order already exists", "error"
)
break
else:
mailbox_ids = request.form.getlist("mailbox_ids")
# check that the mailboxes have not been tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(
url_for(
"dashboard.domain_detail_auto_create",
custom_domain_id=custom_domain.id,
)
)
mailboxes.append(mailbox)
if not mailboxes:
flash("You must select at least 1 mailbox", "warning")
return redirect(
url_for(
"dashboard.domain_detail_auto_create",
custom_domain_id=custom_domain.id,
)
)
try:
re.compile(new_auto_create_rule_form.regex.data)
except Exception:
flash(
f"Invalid regex {new_auto_create_rule_form.regex.data}",
"error",
)
return redirect(
url_for(
"dashboard.domain_detail_auto_create",
custom_domain_id=custom_domain.id,
)
)
rule = AutoCreateRule.create(
custom_domain_id=custom_domain.id,
order=int(new_auto_create_rule_form.order.data),
regex=new_auto_create_rule_form.regex.data,
flush=True,
)
for mailbox in mailboxes:
AutoCreateRuleMailbox.create(
auto_create_rule_id=rule.id, mailbox_id=mailbox.id
)
Session.commit()
flash("New auto create rule has been created", "success")
return redirect(
url_for(
"dashboard.domain_detail_auto_create",
custom_domain_id=custom_domain.id,
)
)
elif request.form.get("form-name") == "delete-auto-create-rule":
rule_id = request.form.get("rule-id")
rule: AutoCreateRule = AutoCreateRule.get(int(rule_id))
if not rule or rule.custom_domain_id != custom_domain.id:
flash("Something wrong, please retry", "error")
return redirect(
url_for(
"dashboard.domain_detail_auto_create",
custom_domain_id=custom_domain.id,
)
)
rule_order = rule.order
AutoCreateRule.delete(rule_id)
Session.commit()
flash(f"Rule #{rule_order} has been deleted", "success")
return redirect(
url_for(
"dashboard.domain_detail_auto_create",
custom_domain_id=custom_domain.id,
)
)
elif request.form.get("form-name") == "test-auto-create-rule":
if auto_create_test_form.validate():
local = auto_create_test_form.local.data
auto_create_test_local = local
for rule in custom_domain.auto_create_rules:
if regex_match(rule.regex, local):
auto_create_test_result = (
f"{local}@{custom_domain.domain} passes rule #{rule.order}"
)
auto_create_test_passed = True
break
else: # no rule passes
auto_create_test_result = (
f"{local}@{custom_domain.domain} doesn't pass any rule"
)
return render_template(
"dashboard/domain_detail/auto-create.html", **locals()
)
return render_template("dashboard/domain_detail/auto-create.html", **locals())

View File

@ -0,0 +1,70 @@
from functools import wraps
from time import time
from flask import render_template, flash, redirect, url_for, session, request
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import PasswordField, validators
from app.config import CONNECT_WITH_PROTON
from app.dashboard.base import dashboard_bp
from app.log import LOG
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url
_SUDO_GAP = 900
class LoginForm(FlaskForm):
password = PasswordField("Password", validators=[validators.DataRequired()])
@dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
@login_required
def enter_sudo():
password_check_form = LoginForm()
if password_check_form.validate_on_submit():
password = password_check_form.password.data
if current_user.check_password(password):
session["sudo_time"] = int(time())
# User comes to sudo page from another page
next_url = sanitize_next_url(request.args.get("next"))
if next_url:
LOG.d("redirect user to %s", next_url)
return redirect(next_url)
else:
LOG.d("redirect user to dashboard")
return redirect(url_for("dashboard.index"))
else:
flash("Incorrect password", "warning")
proton_enabled = CONNECT_WITH_PROTON
if proton_enabled:
# Only for users that have the account linked
partner_user = PartnerUser.get_by(user_id=current_user.id)
if not partner_user or partner_user.partner_id != get_proton_partner().id:
proton_enabled = False
return render_template(
"dashboard/enter_sudo.html",
password_check_form=password_check_form,
next=request.args.get("next"),
connect_with_proton=proton_enabled,
)
def sudo_required(f):
@wraps(f)
def wrap(*args, **kwargs):
if (
"sudo_time" not in session
or (time() - int(session["sudo_time"])) > _SUDO_GAP
):
return redirect(url_for("dashboard.enter_sudo", next=request.path))
return f(*args, **kwargs)
return wrap
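sudo_required only checks that the password re-entry happened within the last _SUDO_GAP seconds, using a timestamp stored in the session. A framework-free sketch of that gate; the session dict and helper name are illustrative.

from time import time

SUDO_GAP = 900  # seconds, mirrors _SUDO_GAP above


def sudo_is_fresh(session: dict) -> bool:
    stamp = session.get("sudo_time")
    return stamp is not None and (time() - int(stamp)) <= SUDO_GAP


session = {"sudo_time": int(time())}
print(sudo_is_fresh(session))                       # True: sudo mode just entered
session["sudo_time"] = int(time()) - SUDO_GAP - 1
print(sudo_is_fresh(session))                       # False: stale, the password must be re-entered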

View File

@ -0,0 +1,59 @@
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import HiddenField, validators
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.log import LOG
from app.models import RecoveryCode, Fido
class FidoManageForm(FlaskForm):
credential_id = HiddenField("credential_id", validators=[validators.DataRequired()])
@dashboard_bp.route("/fido_manage", methods=["GET", "POST"])
@login_required
@sudo_required
def fido_manage():
if not current_user.fido_enabled():
flash("You haven't registered a security key", "warning")
return redirect(url_for("dashboard.index"))
fido_manage_form = FidoManageForm()
if fido_manage_form.validate_on_submit():
credential_id = fido_manage_form.credential_id.data
fido_key = Fido.get_by(uuid=current_user.fido_uuid, credential_id=credential_id)
if not fido_key:
flash("Unknown error, redirect back to manage page", "warning")
return redirect(url_for("dashboard.fido_manage"))
Fido.delete(fido_key.id)
Session.commit()
LOG.d(f"FIDO Key ID={fido_key.id} Removed")
flash(f"Key {fido_key.name} successfully unlinked", "success")
# Disable FIDO for the user if all keys have been deleted
if not Fido.filter_by(uuid=current_user.fido_uuid).all():
current_user.fido_uuid = None
Session.commit()
# the user no longer has any 2FA method enabled, delete all recovery codes
if not current_user.two_factor_authentication_enabled():
RecoveryCode.empty(current_user)
return redirect(url_for("dashboard.index"))
return redirect(url_for("dashboard.fido_manage"))
return render_template(
"dashboard/fido_manage.html",
fido_manage_form=fido_manage_form,
keys=Fido.filter_by(uuid=current_user.fido_uuid),
)

View File

@ -0,0 +1,126 @@
import json
import secrets
import uuid
import webauthn
from flask import render_template, flash, redirect, url_for, session
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, HiddenField, validators
from app.config import RP_ID, URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.log import LOG
from app.models import Fido, RecoveryCode
class FidoTokenForm(FlaskForm):
key_name = StringField("key_name", validators=[validators.DataRequired()])
sk_assertion = HiddenField("sk_assertion", validators=[validators.DataRequired()])
@dashboard_bp.route("/fido_setup", methods=["GET", "POST"])
@login_required
@sudo_required
def fido_setup():
if current_user.fido_uuid is not None:
fidos = Fido.filter_by(uuid=current_user.fido_uuid).all()
else:
fidos = []
fido_token_form = FidoTokenForm()
# Handling POST requests
if fido_token_form.validate_on_submit():
try:
sk_assertion = json.loads(fido_token_form.sk_assertion.data)
except Exception:
flash("Key registration failed. Error: Invalid Payload", "warning")
return redirect(url_for("dashboard.index"))
fido_uuid = session["fido_uuid"]
challenge = session["fido_challenge"]
fido_reg_response = webauthn.WebAuthnRegistrationResponse(
RP_ID,
URL,
sk_assertion,
challenge,
trusted_attestation_cert_required=False,
none_attestation_permitted=True,
)
try:
fido_credential = fido_reg_response.verify()
except Exception as e:
LOG.w(f"An error occurred in WebAuthn registration process: {e}")
flash("Key registration failed.", "warning")
return redirect(url_for("dashboard.index"))
if current_user.fido_uuid is None:
current_user.fido_uuid = fido_uuid
Session.flush()
Fido.create(
credential_id=str(fido_credential.credential_id, "utf-8"),
uuid=fido_uuid,
public_key=str(fido_credential.public_key, "utf-8"),
sign_count=fido_credential.sign_count,
name=fido_token_form.key_name.data,
user_id=current_user.id,
)
Session.commit()
LOG.d(
f"credential_id={str(fido_credential.credential_id, 'utf-8')} added for {fido_uuid}"
)
flash("Security key has been activated", "success")
recovery_codes = RecoveryCode.generate(current_user)
return render_template(
"dashboard/recovery_code.html", recovery_codes=recovery_codes
)
# Prepare information for key registration process
fido_uuid = (
str(uuid.uuid4()) if current_user.fido_uuid is None else current_user.fido_uuid
)
challenge = secrets.token_urlsafe(32)
credential_create_options = webauthn.WebAuthnMakeCredentialOptions(
challenge,
"SimpleLogin",
RP_ID,
fido_uuid,
current_user.email,
current_user.name if current_user.name else current_user.email,
False,
attestation="none",
user_verification="discouraged",
)
# Don't think this one should be used, but it's not configurable by arguments
# https://www.w3.org/TR/webauthn/#sctn-location-extension
registration_dict = credential_create_options.registration_dict
del registration_dict["extensions"]["webauthn.loc"]
# Prevent user from adding duplicated keys
for fido in fidos:
registration_dict["excludeCredentials"].append(
{
"type": "public-key",
"id": fido.credential_id,
"transports": ["usb", "nfc", "ble", "internal"],
}
)
session["fido_uuid"] = fido_uuid
session["fido_challenge"] = challenge.rstrip("=")
return render_template(
"dashboard/fido_setup.html",
fido_token_form=fido_token_form,
credential_create_options=registration_dict,
)

View File

@ -0,0 +1,243 @@
from dataclasses import dataclass
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from app import alias_utils, parallel_limiter
from app.api.serializer import get_alias_infos_with_pagination_v3, get_alias_info_v3
from app.config import ALIAS_LIMIT, PAGE_LIMIT
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
AliasGeneratorEnum,
User,
EmailLog,
Contact,
)
from app.utils import CSRFValidationForm
@dataclass
class Stats:
nb_alias: int
nb_forward: int
nb_reply: int
nb_block: int
def get_stats(user: User) -> Stats:
nb_alias = Alias.filter_by(user_id=user.id).count()
nb_forward = (
Session.query(EmailLog)
.filter_by(user_id=user.id, is_reply=False, blocked=False, bounced=False)
.count()
)
nb_reply = (
Session.query(EmailLog)
.filter_by(user_id=user.id, is_reply=True, blocked=False, bounced=False)
.count()
)
nb_block = (
Session.query(EmailLog)
.filter_by(user_id=user.id, is_reply=False, blocked=True, bounced=False)
.count()
)
return Stats(
nb_alias=nb_alias, nb_forward=nb_forward, nb_reply=nb_reply, nb_block=nb_block
)
@dashboard_bp.route("/", methods=["GET", "POST"])
@limiter.limit(
ALIAS_LIMIT,
methods=["POST"],
exempt_when=lambda: request.form.get("form-name") != "create-random-email",
)
@login_required
@parallel_limiter.lock(
name="alias_creation",
only_when=lambda: request.form.get("form-name") == "create-random-email",
)
def index():
query = request.args.get("query") or ""
sort = request.args.get("sort") or ""
alias_filter = request.args.get("filter") or ""
page = 0
if request.args.get("page"):
page = int(request.args.get("page"))
highlight_alias_id = None
if request.args.get("highlight_alias_id"):
try:
highlight_alias_id = int(request.args.get("highlight_alias_id"))
except ValueError:
LOG.w(
"highlight_alias_id must be a number, received %s",
request.args.get("highlight_alias_id"),
)
csrf_form = CSRFValidationForm()
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "create-custom-email":
if current_user.can_create_new_alias():
return redirect(url_for("dashboard.custom_alias"))
else:
flash("You need to upgrade your plan to create new alias.", "warning")
elif request.form.get("form-name") == "create-random-email":
if current_user.can_create_new_alias():
scheme = int(
request.form.get("generator_scheme") or current_user.alias_generator
)
if not scheme or not AliasGeneratorEnum.has_value(scheme):
scheme = current_user.alias_generator
alias = Alias.create_new_random(user=current_user, scheme=scheme)
alias.mailbox_id = current_user.default_mailbox_id
Session.commit()
LOG.d("create new random alias %s for user %s", alias, current_user)
flash(f"Alias {alias.email} has been created", "success")
return redirect(
url_for(
"dashboard.index",
highlight_alias_id=alias.id,
query=query,
sort=sort,
filter=alias_filter,
)
)
else:
flash("You need to upgrade your plan to create new alias.", "warning")
elif request.form.get("form-name") in ("delete-alias", "disable-alias"):
try:
alias_id = int(request.form.get("alias-id"))
except ValueError:
flash("unknown error", "error")
return redirect(request.url)
alias: Alias = Alias.get(alias_id)
if not alias or alias.user_id != current_user.id:
flash("Unknown error, sorry for the inconvenience", "error")
return redirect(
url_for(
"dashboard.index",
query=query,
sort=sort,
filter=alias_filter,
)
)
if request.form.get("form-name") == "delete-alias":
LOG.d("delete alias %s", alias)
email = alias.email
alias_utils.delete_alias(alias, current_user)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
alias.enabled = False
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")
return redirect(
url_for("dashboard.index", query=query, sort=sort, filter=alias_filter)
)
mailboxes = current_user.mailboxes()
show_intro = False
if not current_user.intro_shown:
LOG.d("Show intro to %s", current_user)
show_intro = True
# to make sure not showing intro to user again
current_user.intro_shown = True
Session.commit()
stats = get_stats(current_user)
mailbox_id = None
if alias_filter and alias_filter.startswith("mailbox:"):
mailbox_id = int(alias_filter[len("mailbox:") :])
directory_id = None
if alias_filter and alias_filter.startswith("directory:"):
directory_id = int(alias_filter[len("directory:") :])
alias_infos = get_alias_infos_with_pagination_v3(
current_user,
page,
query,
sort,
alias_filter,
mailbox_id,
directory_id,
# load 1 alias more to know whether this is the last page
page_limit=PAGE_LIMIT + 1,
)
last_page = len(alias_infos) <= PAGE_LIMIT
# remove the last alias that's added to know whether this is the last page
alias_infos = alias_infos[:PAGE_LIMIT]
# add highlighted alias in case it's not included
if highlight_alias_id and highlight_alias_id not in [
alias_info.alias.id for alias_info in alias_infos
]:
highlight_alias_info = get_alias_info_v3(
current_user, alias_id=highlight_alias_id
)
if highlight_alias_info:
alias_infos.insert(0, highlight_alias_info)
return render_template(
"dashboard/index.html",
alias_infos=alias_infos,
highlight_alias_id=highlight_alias_id,
query=query,
AliasGeneratorEnum=AliasGeneratorEnum,
mailboxes=mailboxes,
show_intro=show_intro,
page=page,
last_page=last_page,
sort=sort,
filter=alias_filter,
stats=stats,
csrf_form=csrf_form,
)
@dashboard_bp.route("/contacts/<int:contact_id>/toggle", methods=["POST"])
@login_required
def toggle_contact(contact_id):
"""
Block/Unblock contact
"""
contact = Contact.get(contact_id)
if not contact or contact.alias.user_id != current_user.id:
return "Forbidden", 403
contact.block_forward = not contact.block_forward
Session.commit()
if contact.block_forward:
toast_msg = f"{contact.website_email} can no longer send emails to {contact.alias.email}"
else:
toast_msg = (
f"{contact.website_email} can now send emails to {contact.alias.email}"
)
return render_template(
"partials/toggle_contact.html", contact=contact, toast_msg=toast_msg
)

View File

@ -0,0 +1,59 @@
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.config import ADMIN_EMAIL
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import send_email
from app.models import LifetimeCoupon
class CouponForm(FlaskForm):
code = StringField("Coupon Code", validators=[validators.DataRequired()])
@dashboard_bp.route("/lifetime_licence", methods=["GET", "POST"])
@login_required
def lifetime_licence():
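"""Redeem a lifetime-licence coupon. The user must not already have a lifetime
licence, and any active Paddle subscription must be cancelled first."""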
if current_user.lifetime:
flash("You already have a lifetime licence", "warning")
return redirect(url_for("dashboard.index"))
# user needs to cancel active subscription first
# to avoid being charged
sub = current_user.get_paddle_subscription()
if sub and not sub.cancelled:
flash("Please cancel your current subscription first", "warning")
return redirect(url_for("dashboard.index"))
coupon_form = CouponForm()
if coupon_form.validate_on_submit():
code = coupon_form.code.data
coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=code)
if coupon and coupon.nb_used > 0:
coupon.nb_used -= 1
current_user.lifetime = True
current_user.lifetime_coupon_id = coupon.id
if coupon.paid:
current_user.paid_lifetime = True
Session.commit()
# notify admin
send_email(
ADMIN_EMAIL,
subject=f"User {current_user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
plaintext="",
html="",
)
flash("You are upgraded to lifetime premium!", "success")
return redirect(url_for("dashboard.index"))
else:
flash(f"Code *{code}* expired or invalid", "warning")
return render_template("dashboard/lifetime_licence.html", coupon_form=coupon_form)

View File

@ -0,0 +1,210 @@
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import Signer
from wtforms import validators
from wtforms.fields.html5 import EmailField
from app.config import MAILBOX_SECRET, URL, JOB_DELETE_MAILBOX
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
mailbox_already_used,
render,
send_email,
is_valid_email,
)
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import CSRFValidationForm
class NewMailboxForm(FlaskForm):
email = EmailField(
"email", validators=[validators.DataRequired(), validators.Email()]
)
@dashboard_bp.route("/mailbox", methods=["GET", "POST"])
@login_required
def mailbox_route():
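"""List the user's mailboxes and handle the delete, set-default and create forms."""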
mailboxes = (
Mailbox.filter_by(user_id=current_user.id)
.order_by(Mailbox.created_at.desc())
.all()
)
new_mailbox_form = NewMailboxForm()
csrf_form = CSRFValidationForm()
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "delete":
mailbox_id = request.form.get("mailbox-id")
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if mailbox.id == current_user.default_mailbox_id:
flash("You cannot delete default mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
# Schedule delete account job
LOG.w("schedule delete mailbox job for %s", mailbox)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={"mailbox_id": mailbox.id},
run_at=arrow.now(),
commit=True,
)
flash(
f"Mailbox {mailbox.email} scheduled for deletion."
f"You will receive a confirmation email when the deletion is finished",
"success",
)
return redirect(url_for("dashboard.mailbox_route"))
if request.form.get("form-name") == "set-default":
mailbox_id = request.form.get("mailbox-id")
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if mailbox.id == current_user.default_mailbox_id:
flash("This mailbox is already default one", "error")
return redirect(url_for("dashboard.mailbox_route"))
if not mailbox.verified:
flash("Cannot set unverified mailbox as default", "error")
return redirect(url_for("dashboard.mailbox_route"))
current_user.default_mailbox_id = mailbox.id
Session.commit()
flash(f"Mailbox {mailbox.email} is set as Default Mailbox", "success")
return redirect(url_for("dashboard.mailbox_route"))
elif request.form.get("form-name") == "create":
if not current_user.is_premium():
flash("Only premium plan can add additional mailbox", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if new_mailbox_form.validate():
mailbox_email = (
new_mailbox_form.email.data.lower().strip().replace(" ", "")
)
if not is_valid_email(mailbox_email):
flash(f"{mailbox_email} invalid", "error")
elif mailbox_already_used(mailbox_email, current_user):
flash(f"{mailbox_email} already used", "error")
elif not email_can_be_used_as_mailbox(mailbox_email):
flash(f"You cannot use {mailbox_email}.", "error")
else:
new_mailbox = Mailbox.create(
email=mailbox_email, user_id=current_user.id
)
Session.commit()
send_verification_email(current_user, new_mailbox)
flash(
f"You are going to receive an email to confirm {mailbox_email}.",
"success",
)
return redirect(
url_for(
"dashboard.mailbox_detail_route", mailbox_id=new_mailbox.id
)
)
return render_template(
"dashboard/mailbox.html",
mailboxes=mailboxes,
new_mailbox_form=new_mailbox_form,
csrf_form=csrf_form,
)
def delete_mailbox(mailbox_id: int):
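"""Delete the mailbox and email its owner a confirmation. Runs outside a request
(hence the light app context below), presumably from the handler of the
JOB_DELETE_MAILBOX job scheduled in mailbox_route()."""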
from server import create_light_app
with create_light_app().app_context():
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
return
mailbox_email = mailbox.email
user = mailbox.user
Mailbox.delete(mailbox_id)
Session.commit()
LOG.d("Mailbox %s %s deleted", mailbox_id, mailbox_email)
send_email(
user.email,
f"Your mailbox {mailbox_email} has been deleted",
f"""Mailbox {mailbox_email} along with its aliases are deleted successfully.
Regards,
SimpleLogin team.
""",
)
def send_verification_email(user, mailbox):
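"""Send the mailbox-verification email. The link embeds the mailbox id signed with
MAILBOX_SECRET so mailbox_verify() below can detect tampering, e.g.
Signer(MAILBOX_SECRET).unsign(Signer(MAILBOX_SECRET).sign(b"42")) == b"42"."""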
s = Signer(MAILBOX_SECRET)
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
verification_url = (
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
)
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
)
@dashboard_bp.route("/mailbox_verify")
def mailbox_verify():
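"""Mark a mailbox as verified from the signed link sent by send_verification_email()."""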
s = Signer(MAILBOX_SECRET)
mailbox_id = request.args.get("mailbox_id")
try:
r_id = int(s.unsign(mailbox_id))
except Exception:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
else:
mailbox = Mailbox.get(r_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox.verified = True
Session.commit()
LOG.d("Mailbox %s is verified", mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)

View File

@ -0,0 +1,299 @@
from smtplib import SMTPRecipientsRefused
from email_validator import validate_email, EmailNotValidError
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import Signer
from wtforms import validators
from wtforms.fields.html5 import EmailField
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.models import Mailbox
from app.pgp_utils import PGPException, load_public_key_and_check
from app.utils import sanitize_email, CSRFValidationForm
class ChangeEmailForm(FlaskForm):
email = EmailField(
"email", validators=[validators.DataRequired(), validators.Email()]
)
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
def mailbox_detail_route(mailbox_id):
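"""Mailbox detail page: change the mailbox email, toggle SPF enforcement, manage
authorized addresses, the PGP key and the generic subject."""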
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
change_email_form = ChangeEmailForm()
csrf_form = CSRFValidationForm()
if mailbox.new_email:
pending_email = mailbox.new_email
else:
pending_email = None
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if (
request.form.get("form-name") == "update-email"
and change_email_form.validate_on_submit()
):
new_email = sanitize_email(change_email_form.email.data)
if new_email != mailbox.email and not pending_email:
# check if this email is not already used
if mailbox_already_used(new_email, current_user) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
elif not email_can_be_used_as_mailbox(new_email):
flash("You cannot use this email address as your mailbox", "error")
else:
mailbox.new_email = new_email
Session.commit()
try:
verify_mailbox_change(current_user, mailbox, new_email)
except SMTPRecipientsRefused:
flash(
f"Incorrect mailbox, please recheck {mailbox.email}",
"error",
)
else:
flash(
f"You are going to receive an email to confirm {new_email}.",
"success",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "force-spf":
if not ENFORCE_SPF:
flash("SPF enforcement globally not enabled", "error")
return redirect(url_for("dashboard.index"))
mailbox.force_spf = request.form.get("spf-status") == "on"
Session.commit()
flash(
"SPF enforcement was "
+ ("enabled" if mailbox.force_spf else "disabled")
+ " successfully",
"success",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "add-authorized-address":
address = sanitize_email(request.form.get("email"))
try:
validate_email(
address, check_deliverability=False, allow_smtputf8=False
).domain
except EmailNotValidError:
flash(f"invalid {address}", "error")
else:
if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
flash(f"{address} already added", "error")
else:
AuthorizedAddress.create(
user_id=current_user.id,
mailbox_id=mailbox.id,
email=address,
commit=True,
)
flash(f"{address} added as authorized address", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "delete-authorized-address":
authorized_address_id = request.form.get("authorized-address-id")
authorized_address: AuthorizedAddress = AuthorizedAddress.get(
authorized_address_id
)
if not authorized_address or authorized_address.mailbox_id != mailbox.id:
flash("Unknown error. Refresh the page", "warning")
else:
address = authorized_address.email
AuthorizedAddress.delete(authorized_address_id)
Session.commit()
flash(f"{address} has been deleted", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "pgp":
if request.form.get("action") == "save":
if not current_user.is_premium():
flash("Only premium plan can add PGP Key", "warning")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
mailbox.pgp_public_key = request.form.get("pgp")
try:
mailbox.pgp_finger_print = load_public_key_and_check(
mailbox.pgp_public_key
)
except PGPException:
flash("Cannot add the public key, please verify it", "error")
else:
Session.commit()
flash("Your PGP public key is saved successfully", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("action") == "remove":
# Free user can decide to remove their added PGP key
mailbox.pgp_public_key = None
mailbox.pgp_finger_print = None
mailbox.disable_pgp = False
Session.commit()
flash("Your PGP public key is removed successfully", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "toggle-pgp":
if request.form.get("pgp-enabled") == "on":
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "success")
else:
mailbox.disable_pgp = True
flash(f"PGP is disabled on {mailbox.email}", "info")
Session.commit()
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("form-name") == "generic-subject":
if request.form.get("action") == "save":
if not mailbox.pgp_enabled():
flash(
"Generic subject can only be used on PGP-enabled mailbox",
"error",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
mailbox.generic_subject = request.form.get("generic-subject")
Session.commit()
flash("Generic subject for PGP-encrypted email is enabled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("action") == "remove":
mailbox.generic_subject = None
Session.commit()
flash("Generic subject for PGP-encrypted email is disabled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
spf_available = ENFORCE_SPF
return render_template("dashboard/mailbox_detail.html", **locals())
def verify_mailbox_change(user, mailbox, new_email):
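"""Email the new address a signed link to /dashboard/mailbox/confirm_change."""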
s = Signer(MAILBOX_SECRET)
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
verification_url = (
f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
)
send_email(
new_email,
"Confirm mailbox change on SimpleLogin",
render(
"transactional/verify-mailbox-change.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
mailbox_new_email=new_email,
),
render(
"transactional/verify-mailbox-change.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
mailbox_new_email=new_email,
),
)
@dashboard_bp.route(
"/mailbox/<int:mailbox_id>/cancel_email_change", methods=["GET", "POST"]
)
@login_required
def cancel_mailbox_change_route(mailbox_id):
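"""Cancel a pending mailbox email change."""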
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
if mailbox.new_email:
mailbox.new_email = None
Session.commit()
flash("Your mailbox change is cancelled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
else:
flash("You have no pending mailbox change", "warning")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
@dashboard_bp.route("/mailbox/confirm_change")
def mailbox_confirm_change_route():
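"""Apply a pending mailbox email change from the signed confirmation link."""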
s = Signer(MAILBOX_SECRET)
signed_mailbox_id = request.args.get("mailbox_id")
try:
mailbox_id = int(s.unsign(signed_mailbox_id))
except Exception:
flash("Invalid link", "error")
return redirect(url_for("dashboard.index"))
else:
mailbox = Mailbox.get(mailbox_id)
# new_email can be None if user cancels change in the meantime
if mailbox and mailbox.new_email:
user = mailbox.user
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
flash(f"{mailbox.new_email} is already used", "error")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
)
mailbox.email = mailbox.new_email
mailbox.new_email = None
# mark mailbox as verified if the change request is sent from an unverified mailbox
mailbox.verified = True
Session.commit()
LOG.d("Mailbox change %s is verified", mailbox)
flash(f"The {mailbox.email} is updated", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
)
else:
flash("Invalid link", "error")
return redirect(url_for("dashboard.index"))

View File

@ -0,0 +1,31 @@
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_required, current_user
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.models import RecoveryCode
@dashboard_bp.route("/mfa_cancel", methods=["GET", "POST"])
@login_required
@sudo_required
def mfa_cancel():
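"""Disable TOTP; recovery codes are removed only when no other 2FA method remains."""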
if not current_user.enable_otp:
flash("you don't have MFA enabled", "warning")
return redirect(url_for("dashboard.index"))
# user cancels TOTP
if request.method == "POST":
current_user.enable_otp = False
current_user.otp_secret = None
Session.commit()
# if the user has no other 2FA method left, delete all recovery codes
if not current_user.two_factor_authentication_enabled():
RecoveryCode.empty(current_user)
flash("TOTP is now disabled", "warning")
return redirect(url_for("dashboard.index"))
return render_template("dashboard/mfa_cancel.html")

View File

@ -0,0 +1,56 @@
import pyotp
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.log import LOG
from app.models import RecoveryCode
class OtpTokenForm(FlaskForm):
token = StringField("Token", validators=[validators.DataRequired()])
@dashboard_bp.route("/mfa_setup", methods=["GET", "POST"])
@login_required
@sudo_required
def mfa_setup():
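"""Enable TOTP: generate a secret, show the provisioning URI and, once the first
token is verified, display freshly generated recovery codes."""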
if current_user.enable_otp:
flash("you have already enabled MFA", "warning")
return redirect(url_for("dashboard.index"))
otp_token_form = OtpTokenForm()
if not current_user.otp_secret:
LOG.d("Generate otp_secret for user %s", current_user)
current_user.otp_secret = pyotp.random_base32()
Session.commit()
totp = pyotp.TOTP(current_user.otp_secret)
if otp_token_form.validate_on_submit():
token = otp_token_form.token.data.replace(" ", "")
if totp.verify(token) and current_user.last_otp != token:
current_user.enable_otp = True
current_user.last_otp = token
Session.commit()
flash("MFA has been activated", "success")
recovery_codes = RecoveryCode.generate(current_user)
return render_template(
"dashboard/recovery_code.html", recovery_codes=recovery_codes
)
else:
flash("Incorrect token", "warning")
otp_uri = pyotp.totp.TOTP(current_user.otp_secret).provisioning_uri(
name=current_user.email, issuer_name="SimpleLogin"
)
return render_template(
"dashboard/mfa_setup.html", otp_token_form=otp_token_form, otp_uri=otp_uri
)

View File

@ -0,0 +1,61 @@
from flask import redirect, url_for, flash, render_template, request
from flask_login import login_required, current_user
from app.config import PAGE_LIMIT
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Notification
@dashboard_bp.route("/notification/<notification_id>", methods=["GET", "POST"])
@login_required
def notification_route(notification_id):
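"""Show a notification. GET marks it as read; POST deletes it."""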
notification = Notification.get(notification_id)
if not notification:
flash("Incorrect link. Redirect you to the home page", "warning")
return redirect(url_for("dashboard.index"))
if notification.user_id != current_user.id:
flash(
"You don't have access to this page. Redirect you to the home page",
"warning",
)
return redirect(url_for("dashboard.index"))
if not notification.read:
notification.read = True
Session.commit()
if request.method == "POST":
notification_title = notification.title or notification.message[:20]
Notification.delete(notification_id)
Session.commit()
flash(f"{notification_title} has been deleted", "success")
return redirect(url_for("dashboard.index"))
else:
return render_template("dashboard/notification.html", notification=notification)
@dashboard_bp.route("/notifications", methods=["GET", "POST"])
@login_required
def notifications_route():
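"""Paginated list of the user's notifications, unread first."""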
page = 0
if request.args.get("page"):
page = int(request.args.get("page"))
notifications = (
Notification.filter_by(user_id=current_user.id)
.order_by(Notification.read, Notification.created_at.desc())
.limit(PAGE_LIMIT + 1) # load a record more to know whether there's more
.offset(page * PAGE_LIMIT)
.all()
)
return render_template(
"dashboard/notifications.html",
notifications=notifications,
page=page,
last_page=len(notifications) <= PAGE_LIMIT,
)

View File

@ -0,0 +1,101 @@
import arrow
from coinbase_commerce import Client
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from app.config import (
PADDLE_VENDOR_ID,
PADDLE_MONTHLY_PRODUCT_ID,
PADDLE_YEARLY_PRODUCT_ID,
URL,
COINBASE_YEARLY_PRICE,
COINBASE_API_KEY,
)
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import (
AppleSubscription,
Subscription,
ManualSubscription,
CoinbaseSubscription,
PartnerUser,
PartnerSubscription,
)
from app.proton.utils import get_proton_partner
@dashboard_bp.route("/pricing", methods=["GET", "POST"])
@login_required
def pricing():
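"""Upgrade page. Users with a lifetime licence, an active Paddle subscription or an
active partner-provided subscription are redirected back to the dashboard."""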
if current_user.lifetime:
flash("You already have a lifetime subscription", "error")
return redirect(url_for("dashboard.index"))
paddle_sub: Subscription = current_user.get_paddle_subscription()
# user who has canceled can re-subscribe
if paddle_sub and not paddle_sub.cancelled:
flash("You already have an active subscription", "error")
return redirect(url_for("dashboard.index"))
now = arrow.now()
manual_sub: ManualSubscription = ManualSubscription.filter(
ManualSubscription.user_id == current_user.id, ManualSubscription.end_at > now
).first()
coinbase_sub = CoinbaseSubscription.filter(
CoinbaseSubscription.user_id == current_user.id,
CoinbaseSubscription.end_at > now,
).first()
apple_sub: AppleSubscription = AppleSubscription.get_by(user_id=current_user.id)
if apple_sub and apple_sub.is_valid():
flash("Please make sure to cancel your subscription on Apple first", "warning")
proton_upgrade = False
partner_user = PartnerUser.get_by(user_id=current_user.id)
if partner_user:
partner_sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
if partner_sub and partner_sub.is_active():
flash(
f"You already have a subscription provided by {partner_user.partner.name}",
"error",
)
return redirect(url_for("dashboard.index"))
proton_upgrade = partner_user.partner_id == get_proton_partner().id
return render_template(
"dashboard/pricing.html",
PADDLE_VENDOR_ID=PADDLE_VENDOR_ID,
PADDLE_MONTHLY_PRODUCT_ID=PADDLE_MONTHLY_PRODUCT_ID,
PADDLE_YEARLY_PRODUCT_ID=PADDLE_YEARLY_PRODUCT_ID,
success_url=URL + "/dashboard/subscription_success",
manual_sub=manual_sub,
coinbase_sub=coinbase_sub,
now=now,
proton_upgrade=proton_upgrade,
)
@dashboard_bp.route("/subscription_success")
@login_required
def subscription_success():
flash("Thanks so much for supporting SimpleLogin!", "success")
return redirect(url_for("dashboard.index"))
@dashboard_bp.route("/coinbase_checkout")
@login_required
@limiter.limit("5/minute")
def coinbase_checkout_route():
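"""Create a Coinbase Commerce charge for the yearly plan and redirect to its hosted
checkout page."""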
client = Client(api_key=COINBASE_API_KEY)
charge = client.charge.create(
name="1 Year SimpleLogin Premium Subscription",
local_price={"amount": str(COINBASE_YEARLY_PRICE), "currency": "USD"},
pricing_type="fixed_price",
metadata={"user_id": current_user.id},
)
LOG.d("Create coinbase charge %s", charge)
return redirect(charge["hosted_url"])

View File

@ -0,0 +1,74 @@
import re2 as re
from flask import render_template, request, flash, redirect, url_for
from flask_login import login_required, current_user
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Referral, Payout
_REFERRAL_PATTERN = r"[0-9a-z-_]{3,}"
@dashboard_bp.route("/referral", methods=["GET", "POST"])
@login_required
def referral_route():
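"""Referral page: create, rename and delete referral codes; list payouts."""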
if request.method == "POST":
if request.form.get("form-name") == "create":
code = request.form.get("code")
if re.fullmatch(_REFERRAL_PATTERN, code) is None:
flash(
"At least 3 characters. Only lowercase letters, "
"numbers, dashes (-) and underscores (_) are currently supported.",
"error",
)
return redirect(url_for("dashboard.referral_route"))
if Referral.get_by(code=code):
flash("Code already used", "error")
return redirect(url_for("dashboard.referral_route"))
name = request.form.get("name")
referral = Referral.create(user_id=current_user.id, code=code, name=name)
Session.commit()
flash("A new referral code has been created", "success")
return redirect(
url_for("dashboard.referral_route", highlight_id=referral.id)
)
elif request.form.get("form-name") == "update":
referral_id = request.form.get("referral-id")
referral = Referral.get(referral_id)
if referral and referral.user_id == current_user.id:
referral.name = request.form.get("name")
Session.commit()
flash("Referral name updated", "success")
return redirect(
url_for("dashboard.referral_route", highlight_id=referral.id)
)
elif request.form.get("form-name") == "delete":
referral_id = request.form.get("referral-id")
referral = Referral.get(referral_id)
if referral and referral.user_id == current_user.id:
Referral.delete(referral.id)
Session.commit()
flash("Referral deleted", "success")
return redirect(url_for("dashboard.referral_route"))
# Highlight a referral
highlight_id = request.args.get("highlight_id")
if highlight_id:
highlight_id = int(highlight_id)
referrals = Referral.filter_by(user_id=current_user.id).all()
# make sure the highlighted referral is the first referral
highlight_index = None
for ix, referral in enumerate(referrals):
if referral.id == highlight_id:
highlight_index = ix
break
if highlight_index is not None:
referrals.insert(0, referrals.pop(highlight_index))
payouts = Payout.filter_by(user_id=current_user.id).all()
return render_template("dashboard/referral.html", **locals())

View File

@ -0,0 +1,39 @@
from flask import render_template, request
from flask_login import login_required, current_user
from app.dashboard.base import dashboard_bp
from app.log import LOG
from app.models import EmailLog
@dashboard_bp.route("/refused_email", methods=["GET", "POST"])
@login_required
def refused_email_route():
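"""List the user's email logs that have a refused email attached, optionally
highlighting one."""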
# Highlight a refused email
highlight_id = request.args.get("highlight_id")
if highlight_id:
try:
highlight_id = int(highlight_id)
except ValueError:
LOG.w("Cannot parse highlight_id %s", highlight_id)
highlight_id = None
email_logs: list[EmailLog] = (
EmailLog.filter(
EmailLog.user_id == current_user.id, EmailLog.refused_email_id.isnot(None)
)
.order_by(EmailLog.id.desc())
.all()
)
# make sure the highlighted email_log is the first email_log
highlight_index = None
for ix, email_log in enumerate(email_logs):
if email_log.id == highlight_id:
highlight_index = ix
break
if highlight_index is not None:
email_logs.insert(0, email_logs.pop(highlight_index))
return render_template("dashboard/refused_email.html", **locals())

Some files were not shown because too many files have changed in this diff