Compare commits
No commits in common. "main" and "4.49.8" have entirely different histories.
app/.github/workflows/main.yml (60 changed lines)
--- main
+++ 4.49.8
@@ -1,12 +1,6 @@
-name: SimpleLogin actions
+name: Test and lint

-on:
-  push:
-    branches:
-      - master
-    tags:
-      - v*
-  pull_request:
+on: [push, pull_request]

 jobs:
   lint:
@@ -15,29 +9,35 @@ jobs:
       - name: Check out repo
         uses: actions/checkout@v3

-      - name: Install uv
-        uses: astral-sh/setup-uv@v5
+      - name: Install poetry
+        run: pipx install poetry

+      - uses: actions/setup-python@v4
         with:
-          # Install a specific version of uv.
-          version: "0.5.21"
-          enable-cache: true
+          python-version: '3.10'
+          cache: 'poetry'

       - name: Install OS dependencies
+        if: ${{ matrix.python-version }} == '3.10'
         run: |
           sudo apt update
           sudo apt install -y libre2-dev libpq-dev

       - name: Install dependencies
-        if: steps.setup-uv.outputs.cache-hit != 'true'
-        run: uv sync --locked --all-extras
+        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+        run: poetry install --no-interaction

       - name: Check formatting & linting
         run: |
-          uv run pre-commit run --all-files
+          poetry run pre-commit run --all-files


   test:
     runs-on: ubuntu-latest
+    strategy:
+      max-parallel: 4
+      matrix:
+        python-version: ["3.10"]

     # service containers to run with `postgres-job`
     services:
@@ -69,21 +69,23 @@ jobs:
       - name: Check out repo
         uses: actions/checkout@v3

-      - name: Install uv
-        uses: astral-sh/setup-uv@v5
+      - name: Install poetry
+        run: pipx install poetry

+      - uses: actions/setup-python@v4
         with:
-          # Install a specific version of uv.
-          version: "0.5.21"
-          enable-cache: true
+          python-version: ${{ matrix.python-version }}
+          cache: 'poetry'

       - name: Install OS dependencies
+        if: ${{ matrix.python-version }} == '3.10'
         run: |
           sudo apt update
           sudo apt install -y libre2-dev libpq-dev

       - name: Install dependencies
-        if: steps.setup-uv.outputs.cache-hit != 'true'
-        run: uv sync --locked --all-extras
+        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+        run: poetry install --no-interaction


       - name: Start Redis v6
@@ -93,21 +95,21 @@ jobs:

       - name: Run db migration
         run: |
-          CONFIG=tests/test.env uv run alembic upgrade head
+          CONFIG=tests/test.env poetry run alembic upgrade head

       - name: Prepare version file
         run: |
-          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
+          scripts/generate-build-info.sh ${{ github.sha }}
           cat app/build_info.py

       - name: Test with pytest
         run: |
-          uv run pytest
+          poetry run pytest
         env:
           GITHUB_ACTIONS_TEST: true

       - name: Archive code coverage results
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
           name: code-coverage-report
           path: htmlcov
@@ -154,14 +156,14 @@ jobs:

       - name: Prepare version file
         run: |
-          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
+          scripts/generate-build-info.sh ${{ github.sha }}
           cat app/build_info.py

       - name: Build image and publish to Docker Registry
         uses: docker/build-push-action@v3
         with:
           context: .
-          platforms: linux/amd64
+          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.meta.outputs.tags }}
@@ -1 +0,0 @@
-3.12.8
@@ -20,7 +20,7 @@ SimpleLogin backend consists of 2 main components:
 ## Install dependencies

 The project requires:
-- Python 3.10 and uv to manage dependencies
+- Python 3.10 and poetry to manage dependencies
 - Node v10 for front-end.
 - Postgres 13+

@@ -28,7 +28,7 @@ First, install all dependencies by running the following command.
 Feel free to use `virtualenv` or similar tools to isolate development environment.

 ```bash
-uv sync
+poetry sync
 ```

 On Mac, sometimes you might need to install some other packages via `brew`:
@@ -55,7 +55,7 @@ brew install -s re2 pybind11
 We use pre-commit to run all our linting and static analysis checks. Please run

 ```bash
-uv run pre-commit install
+poetry run pre-commit install
 ```

 To install it in your development environment.
@@ -160,25 +160,25 @@ Here are the small sum-ups of the directory structures and their roles:
 The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run

 ```
-uv run ruff format .
+poetry run ruff format .
 ```

 The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

 ```bash
-uv run flake8
+poetry run flake8
 ```

 For HTML templates, we use `djlint`. Before creating a pull request, please run

 ```bash
-uv run djlint --check templates
+poetry run djlint --check templates
 ```

 If some files aren't properly formatted, you can format all files with

 ```bash
-uv run djlint --reformat .
+poetry run djlint --reformat .
 ```

 ## Test sending email
@@ -215,7 +215,7 @@ python email_handler.py
 4) Send a test email

 ```bash
-swaks --to e1@sl.lan --from hey@google.com --server 127.0.0.1:20381
+swaks --to e1@sl.local --from hey@google.com --server 127.0.0.1:20381
 ```

 Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you should see the forwarded email.
@@ -239,13 +239,13 @@ brew install python3.10
 # make sure to update the PATH so python, pip point to Python3
 # for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile

-# Although pipx is the recommended way to install uv,
+# Although pipx is the recommended way to install poetry,
 # install pipx via brew will automatically install python 3.12
-# and uv will then use python 3.12
-# so we recommend using uv this way instead
-curl -sSL https://install.python-uv.org | python3 -
+# and poetry will then use python 3.12
+# so we recommend using poetry this way instead
+curl -sSL https://install.python-poetry.org | python3 -

-uv install
+poetry install

 # activate the virtualenv and you should be good to go!
 source .venv/bin/activate
@@ -4,47 +4,43 @@ WORKDIR /code
 COPY ./static/package*.json /code/static/
 RUN cd /code/static && npm ci

-FROM --platform=linux/amd64 ubuntu:22.04
+# Main image
+FROM python:3.10

-ARG UV_VERSION="0.5.21"
-ARG UV_HASH="e108c300eafae22ad8e6d94519605530f18f8762eb58d2b98a617edfb5d088fc"

 # Keeps Python from generating .pyc files in the container
-ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONDONTWRITEBYTECODE 1
 # Turns off buffering for easier container logging
-ENV PYTHONUNBUFFERED=1
+ENV PYTHONUNBUFFERED 1

+# Add poetry to PATH
+ENV PATH="${PATH}:/root/.local/bin"

 WORKDIR /code

-# Copy dependency files
-COPY pyproject.toml uv.lock .python-version ./
+# Copy poetry files
+COPY poetry.lock pyproject.toml ./

-# Install deps
-RUN apt-get update \
-    && apt-get install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev build-essential pkg-config cmake ninja-build bash clang \
-    && curl -sSL "https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-x86_64-unknown-linux-gnu.tar.gz" > uv.tar.gz \
-    && echo "${UV_HASH} uv.tar.gz" | sha256sum -c - \
-    && tar xf uv.tar.gz -C /tmp/ \
-    && mv /tmp/uv-x86_64-unknown-linux-gnu/uv /usr/bin/uv \
-    && mv /tmp/uv-x86_64-unknown-linux-gnu/uvx /usr/bin/uvx \
-    && rm -rf /tmp/uv* \
-    && rm -f uv.tar.gz \
-    && uv python install `cat .python-version` \
-    && uv sync --locked \
-    && apt-get autoremove -y \
-    && apt-get purge -y curl netcat-traditional build-essential pkg-config cmake ninja-build python3-dev clang\
-    && apt-get autoremove -y \
+# Install and setup poetry
+RUN pip install -U pip \
+    && apt-get update \
+    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
+    && curl -sSL https://install.python-poetry.org | python3 - \
+    # Remove curl and netcat from the image
+    && apt-get purge -y curl netcat-traditional \
+    # Run poetry
+    && poetry config virtualenvs.create false \
+    && poetry install --no-interaction --no-ansi --no-root \
+    # Clear apt cache \
+    && apt-get purge -y libre2-dev cmake ninja-build\
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

-# Copy code
-COPY . .

 # copy npm packages
 COPY --from=npm /code /code

-ENV PATH="/code/.venv/bin:$PATH"
+# copy everything else into /code
+COPY . .

 EXPOSE 7777

 #gunicorn wsgi:app -b 0.0.0.0:7777 -w 2 --timeout 15 --log-level DEBUG
@@ -84,7 +84,7 @@ For email gurus, we have chosen 1024 key length instead of 2048 for DNS simplici

 ### DNS

-Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to force using absolute domain.
+Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to to force using absolute domain.


 #### MX record
@@ -7,4 +7,8 @@ If you want be up to date on security patches, make sure your SimpleLogin image

 ## Reporting a Vulnerability

-If you want to report a vulnerability, please take a look at our bug bounty program at https://proton.me/security/bug-bounty.
+If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
+We will review the potential threat and fix it as fast as we can.
+
+We are incredibly thankful for people who disclose vulnerabilities, unfortunately we do not have a bounty program in place yet.
@@ -3,17 +3,12 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional

-import sqlalchemy.exc
 from arrow import Arrow
 from newrelic import agent
-from psycopg2.errors import UniqueViolation
 from sqlalchemy import or_

 from app.db import Session
 from app.email_utils import send_welcome_email
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import UserPlanChanged, EventContent
-from app.partner_user_utils import create_partner_user, create_partner_subscription
 from app.utils import sanitize_email, canonicalize_email
 from app.errors import (
     AccountAlreadyLinkedToAnotherPartnerException,
@@ -28,14 +23,12 @@ from app.models import (
     User,
     Alias,
 )
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import random_string


 class SLPlanType(Enum):
     Free = 1
     Premium = 2
-    PremiumLifetime = 3


 @dataclass
@@ -59,26 +52,8 @@ class LinkResult:
     strategy: str


-def send_user_plan_changed_event(
-    partner_user: PartnerUser,
-) -> UserPlanChanged:
-    subscription_end = partner_user.user.get_active_subscription_end(
-        include_partner_subscription=False
-    )
-    if partner_user.user.lifetime:
-        event = UserPlanChanged(lifetime=True)
-    elif subscription_end:
-        event = UserPlanChanged(plan_end_time=subscription_end.timestamp)
-    else:
-        event = UserPlanChanged(plan_end_time=None)
-    EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
-    Session.flush()
-    return event
-
-
 def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
     sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
-    is_lifetime = plan.type == SLPlanType.PremiumLifetime
     if plan.type == SLPlanType.Free:
         if sub is not None:
             LOG.i(
@@ -87,37 +62,24 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
             PartnerSubscription.delete(sub.id)
             agent.record_custom_event("PlanChange", {"plan": "free"})
     else:
-        end_time = plan.expiration
-        if plan.type == SLPlanType.PremiumLifetime:
-            end_time = None
         if sub is None:
             LOG.i(
-                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] with {end_time} / {is_lifetime}"
+                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
             )
-            create_partner_subscription(
-                partner_user=partner_user,
-                expiration=end_time,
-                lifetime=is_lifetime,
-                msg="Upgraded via partner. User did not have a previous partner subscription",
+            PartnerSubscription.create(
+                partner_user_id=partner_user.id,
+                end_at=plan.expiration,
             )
             agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
         else:
-            if sub.end_at != plan.expiration or sub.lifetime != is_lifetime:
+            if sub.end_at != plan.expiration:
+                LOG.i(
+                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
+                )
                 agent.record_custom_event(
                     "PlanChange", {"plan": "premium", "type": "extension"}
                 )
-                sub.end_at = plan.expiration if not is_lifetime else None
-                sub.lifetime = is_lifetime
-                LOG.i(
-                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] to {sub.end_at} / {sub.lifetime} "
-                )
-                emit_user_audit_log(
-                    user=partner_user.user,
-                    action=UserAuditLogAction.SubscriptionExtended,
-                    message="Extended partner subscription",
-                )
-    Session.flush()
-    send_user_plan_changed_event(partner_user)
+                sub.end_at = plan.expiration
     Session.commit()


@@ -136,13 +98,12 @@ def ensure_partner_user_exists_for_user(
     if res and res.partner_id != partner.id:
         raise AccountAlreadyLinkedToAnotherPartnerException()
     if not res:
-        res = create_partner_user(
-            user=sl_user,
+        res = PartnerUser.create(
+            user_id=sl_user.id,
             partner_id=partner.id,
             partner_email=link_request.email,
             external_user_id=link_request.external_user_id,
         )

         Session.commit()
         LOG.i(
             f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
@@ -170,9 +131,8 @@ class ClientMergeStrategy(ABC):

 class NewUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-        canonical_email = canonicalize_email(self.link_request.email)
-        try:
             # Will create a new SL User with a random password
+        canonical_email = canonicalize_email(self.link_request.email)
             new_user = User.create(
                 email=canonical_email,
                 name=self.link_request.name,
@@ -180,49 +140,8 @@ class NewUserStrategy(ClientMergeStrategy):
                 activated=True,
                 from_partner=self.link_request.from_partner,
             )
-            self.create_partner_user(new_user)
-            Session.commit()
-
-            if not new_user.created_by_partner:
-                send_welcome_email(new_user)
-
-            agent.record_custom_event(
-                "PartnerUserCreation", {"partner": self.partner.name}
-            )
-
-            return LinkResult(
-                user=new_user,
-                strategy=self.__class__.__name__,
-            )
-        except (UniqueViolation, sqlalchemy.exc.IntegrityError) as e:
-            Session.rollback()
-            LOG.debug(f"Got the duplicate user error: {e}")
-            return self.create_missing_link(canonical_email)
-
-    def create_missing_link(self, canonical_email: str):
-        # If there's a unique key violation due to race conditions try to create only the partner if needed
-        partner_user = PartnerUser.get_by(
-            external_user_id=self.link_request.external_user_id,
-            partner_id=self.partner.id,
-        )
-        if partner_user is None:
-            # Get the user by canonical email and if not by normal email
-            user = User.get_by(email=canonical_email) or User.get_by(
-                email=self.link_request.email
-            )
-            if not user:
-                raise RuntimeError(
-                    "Tried to create only partner on UniqueViolation but cannot find the user"
-                )
-            partner_user = self.create_partner_user(user)
-        Session.commit()
-        return LinkResult(
-            user=partner_user.user, strategy=ExistingUnlinkedUserStrategy.__name__
-        )
-
-    def create_partner_user(self, new_user: User):
-        partner_user = create_partner_user(
-            user=new_user,
+        partner_user = PartnerUser.create(
+            user_id=new_user.id,
             partner_id=self.partner.id,
             external_user_id=self.link_request.external_user_id,
             partner_email=self.link_request.email,
@@ -234,7 +153,17 @@ class NewUserStrategy(ClientMergeStrategy):
             partner_user,
             self.link_request.plan,
         )
-        return partner_user
+        Session.commit()
+
+        if not new_user.created_by_partner:
+            send_welcome_email(new_user)
+
+        agent.record_custom_event("PartnerUserCreation", {"partner": self.partner.name})
+
+        return LinkResult(
+            user=new_user,
+            strategy=self.__class__.__name__,
+        )


 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
@@ -271,7 +200,7 @@ def get_login_strategy(
         return ExistingUnlinkedUserStrategy(link_request, user, partner)


-def check_alias(email: str):
+def check_alias(email: str) -> bool:
     alias = Alias.get_by(email=email)
     if alias is not None:
         raise AccountIsUsingAliasAsEmail()
@@ -346,26 +275,10 @@ def switch_already_linked_user(
         LOG.i(
             f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
         )
-
-        emit_user_audit_log(
-            user=other_partner_user.user,
-            action=UserAuditLogAction.UnlinkAccount,
-            message=f"Deleting partner_user {other_partner_user.id} (external_user_id={other_partner_user.external_user_id} | partner_email={other_partner_user.partner_email}) from user {current_user.id}, as we received a new link request for the same partner",
-        )
         PartnerUser.delete(other_partner_user.id)
     LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
     # Link this partner_user to the current user
-    emit_user_audit_log(
-        user=partner_user.user,
-        action=UserAuditLogAction.UnlinkAccount,
-        message=f"Unlinking from partner, as user will now be tied to another external account. old=(id={partner_user.user.id} | email={partner_user.user.email}) | new=(id={current_user.id} | email={current_user.email})",
-    )
     partner_user.user_id = current_user.id
-    emit_user_audit_log(
-        user=current_user,
-        action=UserAuditLogAction.LinkAccount,
-        message=f"Linking user {current_user.id} ({current_user.email}) to partner_user:{partner_user.id} (external_user_id={partner_user.external_user_id} | partner_email={partner_user.partner_email})",
-    )
     # Set plan
     set_plan_for_partner_user(partner_user, link_request.plan)
     Session.commit()
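The main-side helper `send_user_plan_changed_event` (removed on the 4.49.8 side above) chooses between a lifetime flag and a `plan_end_time` timestamp. A minimal, self-contained sketch of that branching, using a stand-in dataclass instead of the real `UserPlanChanged` protobuf and SimpleLogin models:

```python
# Sketch only: UserPlanChangedStub stands in for the UserPlanChanged protobuf message.
from dataclasses import dataclass
from typing import Optional


@dataclass
class UserPlanChangedStub:
    lifetime: bool = False
    plan_end_time: Optional[int] = None


def plan_changed_payload(lifetime: bool, subscription_end_ts: Optional[int]) -> UserPlanChangedStub:
    """Mirror the three branches: lifetime plan, dated subscription, or no subscription."""
    if lifetime:
        return UserPlanChangedStub(lifetime=True)
    if subscription_end_ts:
        return UserPlanChangedStub(plan_end_time=subscription_end_ts)
    return UserPlanChangedStub()


print(plan_changed_payload(False, 1735689600))  # dated premium subscription
print(plan_changed_payload(True, None))         # lifetime premium
print(plan_changed_payload(False, None))        # free / no active subscription
```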
@@ -1,29 +1,21 @@
 from __future__ import annotations
-from typing import Optional, List
+from typing import Optional

 import arrow
 import sqlalchemy
-from flask import redirect, url_for, request, flash, Response
 from flask_admin import BaseView
+from flask_admin.form import SecureForm
+from flask_admin.model.template import EndpointLinkRowAction
+from markupsafe import Markup
+
+from app import models, s3
+from flask import redirect, url_for, request, flash, Response
 from flask_admin import expose, AdminIndexView
 from flask_admin.actions import action
 from flask_admin.contrib import sqla
-from flask_admin.form import SecureForm
-from flask_admin.model.template import EndpointLinkRowAction
 from flask_login import current_user
-from markupsafe import Markup
-
-from app import models, s3, config
-from app.custom_domain_validation import (
-    CustomDomainValidation,
-    DomainValidationResult,
-    ExpectedValidationRecords,
-)
 from app.db import Session
-from app.dns_utils import get_network_dns_client
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import EventContent, UserPlanChanged
 from app.models import (
     User,
     ManualSubscription,
@@ -42,14 +34,8 @@ from app.models import (
     DeletedAlias,
     DomainDeletedAlias,
     PartnerUser,
-    AliasMailbox,
-    AliasAuditLog,
-    UserAuditLog,
-    CustomDomain,
 )
 from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
-from app.proton.proton_unlink import perform_proton_account_unlink
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 def _admin_action_formatter(view, context, model, name):
@@ -126,7 +112,7 @@ class SLAdminIndexView(AdminIndexView):
         if not current_user.is_authenticated or not current_user.is_admin:
             return redirect(url_for("auth.login", next=request.url))

-        return redirect(url_for("admin.email_search.index"))
+        return redirect("/admin/user")


 class UserAdmin(SLModelView):
@@ -362,40 +348,15 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
                 manual_sub.end_at = manual_sub.end_at.shift(years=1)
             else:
                 manual_sub.end_at = arrow.now().shift(years=1, days=1)
-            emit_user_audit_log(
-                user=user,
-                action=UserAuditLogAction.Upgrade,
-                message=f"Admin {current_user.email} extended manual subscription to user {user.email}",
-            )
-            EventDispatcher.send_event(
-                user=user,
-                content=EventContent(
-                    user_plan_change=UserPlanChanged(
-                        plan_end_time=manual_sub.end_at.timestamp
-                    )
-                ),
-            )
             flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
-        else:
-            emit_user_audit_log(
-                user=user,
-                action=UserAuditLogAction.Upgrade,
-                message=f"Admin {current_user.email} created manual subscription to user {user.email}",
-            )
-            manual_sub = ManualSubscription.create(
+            continue
+
+        ManualSubscription.create(
             user_id=user.id,
             end_at=arrow.now().shift(years=1, days=1),
             comment=way,
             is_giveaway=is_giveaway,
         )
-            EventDispatcher.send_event(
-                user=user,
-                content=EventContent(
-                    user_plan_change=UserPlanChanged(
-                        plan_end_time=manual_sub.end_at.timestamp
-                    )
-                ),
-            )

         flash(f"New {way} manual subscription for {user} is created", "success")
     Session.commit()
@@ -489,7 +450,14 @@ class ManualSubscriptionAdmin(SLModelView):
         "Extend 1 year more?",
     )
     def extend_1y(self, ids):
-        self.__extend_manual_subscription(ids, msg="1 year", years=1)
+        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
+            ms.end_at = ms.end_at.shift(years=1)
+            flash(f"Extend subscription for 1 year for {ms.user}", "success")
+            AdminAuditLog.extend_subscription(
+                current_user.id, ms.user.id, ms.end_at, "1 year"
+            )
+
+        Session.commit()

     @action(
         "extend_1m",
@@ -497,26 +465,11 @@ class ManualSubscriptionAdmin(SLModelView):
         "Extend 1 month more?",
     )
     def extend_1m(self, ids):
-        self.__extend_manual_subscription(ids, msg="1 month", months=1)
-
-    def __extend_manual_subscription(self, ids: List[int], msg: str, **kwargs):
         for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
-            sub: ManualSubscription = ms
-            sub.end_at = sub.end_at.shift(**kwargs)
-            flash(f"Extend subscription for {msg} for {sub.user}", "success")
-            emit_user_audit_log(
-                user=sub.user,
-                action=UserAuditLogAction.Upgrade,
-                message=f"Admin {current_user.email} extended manual subscription for {msg} for {sub.user}",
-            )
+            ms.end_at = ms.end_at.shift(months=1)
+            flash(f"Extend subscription for 1 month for {ms.user}", "success")
             AdminAuditLog.extend_subscription(
-                current_user.id, sub.user.id, sub.end_at, msg
-            )
-            EventDispatcher.send_event(
-                user=sub.user,
-                content=EventContent(
-                    user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
-                ),
+                current_user.id, ms.user.id, ms.end_at, "1 month"
             )

         Session.commit()
@@ -781,54 +734,24 @@ class InvalidMailboxDomainAdmin(SLModelView):


 class EmailSearchResult:
-    def __init__(self):
-        self.no_match: bool = True
-        self.alias: Optional[Alias] = None
-        self.alias_audit_log: Optional[List[AliasAuditLog]] = None
-        self.mailbox: List[Mailbox] = []
-        self.mailbox_count: int = 0
-        self.deleted_alias: Optional[DeletedAlias] = None
-        self.deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
-        self.domain_deleted_alias: Optional[DomainDeletedAlias] = None
-        self.domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
-        self.user: Optional[User] = None
-        self.user_audit_log: Optional[List[UserAuditLog]] = None
-        self.query: str
+    no_match: bool = True
+    alias: Optional[Alias] = None
+    mailbox: list[Mailbox] = []
+    mailbox_count: int = 0
+    deleted_alias: Optional[DeletedAlias] = None
+    deleted_custom_alias: Optional[DomainDeletedAlias] = None
+    user: Optional[User] = None

     @staticmethod
-    def from_request_email(email: str) -> EmailSearchResult:
+    def from_email(email: str) -> EmailSearchResult:
         output = EmailSearchResult()
-        output.query = email
         alias = Alias.get_by(email=email)
         if alias:
             output.alias = alias
-            output.alias_audit_log = (
-                AliasAuditLog.filter_by(alias_id=alias.id)
-                .order_by(AliasAuditLog.created_at.desc())
-                .all()
-            )
             output.no_match = False
-        try:
-            user_id = int(email)
-            user = User.get(user_id)
-        except ValueError:
         user = User.get_by(email=email)
         if user:
             output.user = user
-            output.user_audit_log = (
-                UserAuditLog.filter_by(user_id=user.id)
-                .order_by(UserAuditLog.created_at.desc())
-                .all()
-            )
-            output.no_match = False
-
-        user_audit_log = (
-            UserAuditLog.filter_by(user_email=email)
-            .order_by(UserAuditLog.created_at.desc())
-            .all()
-        )
-        if user_audit_log:
-            output.user_audit_log = user_audit_log
             output.no_match = False
         mailboxes = (
             Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
@@ -840,20 +763,10 @@ class EmailSearchResult:
         deleted_alias = DeletedAlias.get_by(email=email)
         if deleted_alias:
             output.deleted_alias = deleted_alias
-            output.deleted_alias_audit_log = (
-                AliasAuditLog.filter_by(alias_email=deleted_alias.email)
-                .order_by(AliasAuditLog.created_at.desc())
-                .all()
-            )
             output.no_match = False
         domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
         if domain_deleted_alias:
             output.domain_deleted_alias = domain_deleted_alias
-            output.domain_deleted_alias_audit_log = (
-                AliasAuditLog.filter_by(alias_email=domain_deleted_alias.email)
-                .order_by(AliasAuditLog.created_at.desc())
-                .all()
-            )
             output.no_match = False
         return output

@@ -872,25 +785,6 @@ class EmailSearchHelpers:
     def mailbox_count(user: User) -> int:
         return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()

-    @staticmethod
-    def alias_mailboxes(alias: Alias) -> list[Mailbox]:
-        return (
-            Session.query(Mailbox)
-            .filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
-            .union(
-                Session.query(Mailbox)
-                .join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
-                .filter(AliasMailbox.alias_id == alias.id)
-            )
-            .order_by(Mailbox.id)
-            .limit(10)
-            .all()
-        )
-
-    @staticmethod
-    def alias_mailbox_count(alias: Alias) -> int:
-        return len(alias.mailboxes)
-
     @staticmethod
     def alias_list(user: User) -> list[Alias]:
         return (
@@ -918,10 +812,10 @@ class EmailSearchAdmin(BaseView):
     @expose("/", methods=["GET", "POST"])
     def index(self):
         search = EmailSearchResult()
-        email = request.args.get("query")
+        email = request.args.get("email")
         if email is not None and len(email) > 0:
             email = email.strip()
-            search = EmailSearchResult.from_request_email(email)
+            search = EmailSearchResult.from_email(email)

         return self.render(
             "admin/email_search.html",
@@ -929,135 +823,3 @@ class EmailSearchAdmin(BaseView):
             data=search,
             helper=EmailSearchHelpers,
         )
-
-    @expose("/partner_unlink", methods=["POST"])
-    def delete_partner_link(self):
-        user_id = request.form.get("user_id")
-        if not user_id:
-            flash("Missing user_id", "error")
-            return redirect(url_for("admin.email_search.index"))
-        try:
-            user_id = int(user_id)
-        except ValueError:
-            flash("Missing user_id", "error")
-            return redirect(url_for("admin.email_search.index", query=user_id))
-        user = User.get(user_id)
-        if user is None:
-            flash("User not found", "error")
-            return redirect(url_for("admin.email_search.index", query=user_id))
-        external_user_id = perform_proton_account_unlink(user, skip_check=True)
-        if not external_user_id:
-            flash("User unlinked", "success")
-            return redirect(url_for("admin.email_search.index", query=user_id))
-
-        AdminAuditLog.create(
-            admin_user_id=user.id,
-            model=User.__class__.__name__,
-            model_id=user.id,
-            action=AuditLogActionEnum.unlink_user.value,
-            data={"external_user_id": external_user_id},
-        )
-        Session.commit()
-
-        return redirect(url_for("admin.email_search.index", query=user_id))
-
-
-class CustomDomainWithValidationData:
-    def __init__(self, domain: CustomDomain):
-        self.domain: CustomDomain = domain
-        self.ownership_expected: Optional[ExpectedValidationRecords] = None
-        self.ownership_validation: Optional[DomainValidationResult] = None
-        self.mx_expected: Optional[dict[int, ExpectedValidationRecords]] = None
-        self.mx_validation: Optional[DomainValidationResult] = None
-        self.spf_expected: Optional[ExpectedValidationRecords] = None
-        self.spf_validation: Optional[DomainValidationResult] = None
-        self.dkim_expected: {str: ExpectedValidationRecords} = {}
-        self.dkim_validation: {str: str} = {}
-
-
-class CustomDomainSearchResult:
-    def __init__(self):
-        self.no_match: bool = False
-        self.user: Optional[User] = None
-        self.domains: list[CustomDomainWithValidationData] = []
-
-    @staticmethod
-    def from_user(user: Optional[User]) -> CustomDomainSearchResult:
-        out = CustomDomainSearchResult()
-        if user is None:
-            out.no_match = True
-            return out
-        out.user = user
-        dns_client = get_network_dns_client()
-        validator = CustomDomainValidation(
-            dkim_domain=config.EMAIL_DOMAIN,
-            partner_domains=config.PARTNER_DNS_CUSTOM_DOMAINS,
-            partner_domains_validation_prefixes=config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES,
-            dns_client=dns_client,
-        )
-        for custom_domain in user.custom_domains:
-            validation_data = CustomDomainWithValidationData(custom_domain)
-            if not custom_domain.ownership_verified:
-                validation_data.ownership_expected = (
-                    validator.get_ownership_verification_record(custom_domain)
-                )
-                validation_data.ownership_validation = (
-                    validator.validate_domain_ownership(custom_domain)
-                )
-            if not custom_domain.verified:
-                validation_data.mx_expected = validator.get_expected_mx_records(
-                    custom_domain
-                )
-                validation_data.mx_validation = validator.validate_mx_records(
-                    custom_domain
-                )
-            if not custom_domain.spf_verified:
-                validation_data.spf_expected = validator.get_expected_spf_record(
-                    custom_domain
-                )
-                validation_data.spf_validation = validator.validate_spf_records(
-                    custom_domain
-                )
-            if not custom_domain.dkim_verified:
-                validation_data.dkim_expected = validator.get_dkim_records(
-                    custom_domain
-                )
-                validation_data.dkim_validation = validator.validate_dkim_records(
-                    custom_domain
-                )
-            out.domains.append(validation_data)
-
-        return out
-
-
-class CustomDomainSearchAdmin(BaseView):
-    def is_accessible(self):
-        return current_user.is_authenticated and current_user.is_admin
-
-    def inaccessible_callback(self, name, **kwargs):
-        # redirect to login page if user doesn't have access
-        flash("You don't have access to the admin page", "error")
-        return redirect(url_for("dashboard.index", next=request.url))
-
-    @expose("/", methods=["GET", "POST"])
-    def index(self):
-        query = request.args.get("user")
-        if query is None:
-            search = CustomDomainSearchResult()
-        else:
-            try:
-                user_id = int(query)
-                user = User.get_by(id=user_id)
-            except ValueError:
-                user = User.get_by(email=query)
-            if user is None:
-                cd = CustomDomain.get_by(domain=query)
-                if cd is not None:
-                    user = cd.user
-            search = CustomDomainSearchResult.from_user(user)
-
-        return self.render(
-            "admin/custom_domain_search.html",
-            data=search,
-            query=query,
-        )
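The main-side `EmailSearchResult.from_request_email` above accepts either a numeric user id or an email address as the search query. A self-contained sketch of that id-or-email dispatch, with plain callables standing in for `User.get` (by primary key) and `User.get_by` (by email):

```python
from typing import Callable, Optional


def find_user(
    query: str,
    get_by_id: Callable[[int], Optional[dict]],
    get_by_email: Callable[[str], Optional[dict]],
) -> Optional[dict]:
    try:
        # A purely numeric query is treated as a user id first ...
        return get_by_id(int(query))
    except ValueError:
        # ... otherwise it is treated as an email address.
        return get_by_email(query)


users = {1: {"id": 1, "email": "a@example.com"}}
by_email = {u["email"]: u for u in users.values()}
print(find_user("1", users.get, by_email.get))
print(find_user("a@example.com", users.get, by_email.get))
```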
@@ -1,38 +0,0 @@
-from enum import Enum
-from typing import Optional
-
-from app.models import Alias, AliasAuditLog
-
-
-class AliasAuditLogAction(Enum):
-    CreateAlias = "create"
-    ChangeAliasStatus = "change_status"
-    DeleteAlias = "delete"
-    UpdateAlias = "update"
-
-    InitiateTransferAlias = "initiate_transfer_alias"
-    AcceptTransferAlias = "accept_transfer_alias"
-    TransferredAlias = "transferred_alias"
-
-    ChangedMailboxes = "changed_mailboxes"
-
-    CreateContact = "create_contact"
-    UpdateContact = "update_contact"
-    DeleteContact = "delete_contact"
-
-
-def emit_alias_audit_log(
-    alias: Alias,
-    action: AliasAuditLogAction,
-    message: str,
-    user_id: Optional[int] = None,
-    commit: bool = False,
-):
-    AliasAuditLog.create(
-        user_id=user_id or alias.user_id,
-        alias_id=alias.id,
-        alias_email=alias.email,
-        action=action.value,
-        message=message,
-        commit=commit,
-    )
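Based only on the `emit_alias_audit_log` signature shown in the removed file above, a caller records an audit entry roughly as follows; the dict and the print-based body are stand-ins for the real `Alias` model and `AliasAuditLog.create`, shown purely to illustrate the argument shape:

```python
from enum import Enum


class AliasAuditLogAction(Enum):
    ChangeAliasStatus = "change_status"


def emit_alias_audit_log(alias, action, message, user_id=None, commit=False):
    # Stand-in body: the real helper writes an AliasAuditLog row instead of printing.
    print(
        f"user={user_id or alias['user_id']} alias={alias['email']} "
        f"action={action.value} message={message!r} commit={commit}"
    )


alias = {"id": 42, "user_id": 7, "email": "hello@example.com"}
emit_alias_audit_log(
    alias, AliasAuditLogAction.ChangeAliasStatus, "Set alias status to False", commit=True
)
```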
@@ -1,62 +0,0 @@
-from dataclasses import dataclass
-from enum import Enum
-from typing import List, Optional
-
-from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
-from app.db import Session
-from app.models import Alias, AliasMailbox, Mailbox
-
-_MAX_MAILBOXES_PER_ALIAS = 20
-
-
-class CannotSetMailboxesForAliasCause(Enum):
-    Forbidden = "Forbidden"
-    EmptyMailboxes = "Must choose at least one mailbox"
-    TooManyMailboxes = "Too many mailboxes"
-
-
-@dataclass
-class SetMailboxesForAliasResult:
-    performed_change: bool
-    reason: Optional[CannotSetMailboxesForAliasCause]
-
-
-def set_mailboxes_for_alias(
-    user_id: int, alias: Alias, mailbox_ids: List[int]
-) -> Optional[CannotSetMailboxesForAliasCause]:
-    if len(mailbox_ids) == 0:
-        return CannotSetMailboxesForAliasCause.EmptyMailboxes
-    if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
-        return CannotSetMailboxesForAliasCause.TooManyMailboxes
-
-    mailboxes = (
-        Session.query(Mailbox)
-        .filter(
-            Mailbox.id.in_(mailbox_ids),
-            Mailbox.user_id == user_id,
-            Mailbox.verified == True,  # noqa: E712
-        )
-        .order_by(Mailbox.id.asc())
-        .all()
-    )
-    if len(mailboxes) != len(mailbox_ids):
-        return CannotSetMailboxesForAliasCause.Forbidden
-
-    # first remove all existing alias-mailboxes links
-    AliasMailbox.filter_by(alias_id=alias.id).delete()
-    Session.flush()
-
-    # then add all new mailboxes, being the first the one associated with the alias
-    for i, mailbox in enumerate(mailboxes):
-        if i == 0:
-            alias.mailbox_id = mailboxes[0].id
-        else:
-            AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
-
-    emit_alias_audit_log(
-        alias=alias,
-        action=AliasAuditLogAction.ChangedMailboxes,
-        message=",".join([f"{mailbox.id} ({mailbox.email})" for mailbox in mailboxes]),
-    )
-
-    return None
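The removed `set_mailboxes_for_alias` validates its input before touching the database: reject an empty list, reject more than `_MAX_MAILBOXES_PER_ALIAS` entries, and reject ids that are not verified mailboxes of the user. A self-contained sketch of that validation order, with plain data structures in place of the real models:

```python
from typing import List, Optional, Set

_MAX_MAILBOXES_PER_ALIAS = 20


def validate_mailbox_ids(mailbox_ids: List[int], owned_verified_ids: Set[int]) -> Optional[str]:
    if len(mailbox_ids) == 0:
        return "Must choose at least one mailbox"
    if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
        return "Too many mailboxes"
    if not set(mailbox_ids) <= owned_verified_ids:
        # Some id is not a verified mailbox owned by this user.
        return "Forbidden"
    return None  # OK: the caller can now relink the alias to these mailboxes


print(validate_mailbox_ids([1, 2], owned_verified_ids={1, 2, 3}))  # None -> allowed
print(validate_mailbox_ids([], owned_verified_ids={1}))            # empty-list error
print(validate_mailbox_ids([9], owned_verified_ids={1}))           # Forbidden
```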
@@ -58,7 +58,7 @@ def verify_prefix_suffix(

     # alias_domain must be either one of user custom domains or built-in domains
     if alias_domain not in user.available_alias_domains(alias_options=alias_options):
-        LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
+        LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
         return False

     # SimpleLogin domain case:
@@ -75,17 +75,17 @@ def verify_prefix_suffix(
         and not config.DISABLE_ALIAS_SUFFIX
     ):
         if not alias_domain_prefix.startswith("."):
-            LOG.i("User %s submits a wrong alias suffix %s", user, alias_suffix)
+            LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
             return False

     else:
         if alias_domain not in user_custom_domains:
             if not config.DISABLE_ALIAS_SUFFIX:
-                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

             if alias_domain not in available_sl_domains:
-                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

     return True
@@ -1,14 +1,12 @@
 import csv
 from io import StringIO
 import re
-from dataclasses import dataclass
 from typing import Optional, Tuple

 from email_validator import validate_email, EmailNotValidError
 from sqlalchemy.exc import IntegrityError, DataError
 from flask import make_response

-from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
 from app.config import (
     BOUNCE_PREFIX_FOR_REPLY_PHASE,
     BOUNCE_PREFIX,
@@ -25,7 +23,6 @@ from app.email_utils import (
     send_cannot_create_domain_alias,
     send_email,
     render,
-    sl_formataddr,
 )
 from app.errors import AliasInTrashError
 from app.events.event_dispatcher import EventDispatcher
@@ -33,7 +30,6 @@ from app.events.generated.event_pb2 import (
     AliasDeleted,
     AliasStatusChanged,
     EventContent,
-    AliasCreated,
 )
 from app.log import LOG
 from app.models import (
@@ -367,18 +363,11 @@ def delete_alias(
         Session.commit()
         LOG.i(f"Moving {alias} to global trash {deleted_alias}")

-    alias_id = alias.id
-    alias_email = alias.email
-
-    emit_alias_audit_log(
-        alias, AliasAuditLogAction.DeleteAlias, "Alias deleted by user action"
-    )
     Alias.filter(Alias.id == alias.id).delete()
     Session.commit()

     EventDispatcher.send_event(
-        user,
-        EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
+        user, EventContent(alias_deleted=AliasDeleted(alias_id=alias.id))
     )
     if commit:
         Session.commit()
@@ -455,7 +444,7 @@ def alias_export_csv(user, csv_direct_export=False):
     return output


-def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
+def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
     # cannot transfer alias which is used for receiving newsletter
     if User.get_by(newsletter_alias_id=alias.id):
         raise Exception("Cannot transfer alias that's used to receive newsletter")
@@ -509,90 +498,17 @@ def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
     alias.disable_pgp = False
     alias.pinned = False

-    emit_alias_audit_log(
-        alias=alias,
-        action=AliasAuditLogAction.TransferredAlias,
-        message=f"Lost ownership of alias due to alias transfer confirmed. New owner is {new_user.id}",
-        user_id=old_user.id,
-    )
-    EventDispatcher.send_event(
-        old_user,
-        EventContent(
-            alias_deleted=AliasDeleted(
-                id=alias.id,
-                email=alias.email,
-            )
-        ),
-    )
-
-    emit_alias_audit_log(
-        alias=alias,
-        action=AliasAuditLogAction.AcceptTransferAlias,
-        message=f"Accepted alias transfer from user {old_user.id}",
-        user_id=new_user.id,
-    )
-    EventDispatcher.send_event(
-        new_user,
-        EventContent(
-            alias_created=AliasCreated(
-                id=alias.id,
-                email=alias.email,
-                note=alias.note,
-                enabled=alias.enabled,
-                created_at=int(alias.created_at.timestamp),
-            )
-        ),
-    )
-
     Session.commit()


-def change_alias_status(
-    alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
-):
+def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
     LOG.i(f"Changing alias {alias} enabled to {enabled}")
     alias.enabled = enabled

     event = AliasStatusChanged(
-        id=alias.id,
-        email=alias.email,
-        enabled=enabled,
-        created_at=int(alias.created_at.timestamp),
+        alias_id=alias.id, alias_email=alias.email, enabled=enabled
     )
     EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
-    audit_log_message = f"Set alias status to {enabled}"
-    if message is not None:
-        audit_log_message += f". {message}"
-    emit_alias_audit_log(
-        alias, AliasAuditLogAction.ChangeAliasStatus, audit_log_message
-    )

     if commit:
         Session.commit()


-@dataclass
-class AliasRecipientName:
-    name: str
-    message: Optional[str] = None
-
-
-def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
-    """
-    Logic:
-    1. If alias has name, use it
-    2. If alias has custom domain, and custom domain has name, use it
-    3. Otherwise, use the alias email as the recipient
-    """
-    if alias.name:
-        return AliasRecipientName(
-            name=sl_formataddr((alias.name, alias.email)),
-            message=f"Put alias name {alias.name} in from header",
-        )
-    elif alias.custom_domain:
-        if alias.custom_domain.name:
-            return AliasRecipientName(
-                name=sl_formataddr((alias.custom_domain.name, alias.email)),
-                message=f"Put domain default alias name {alias.custom_domain.name} in from header",
-            )
-    return AliasRecipientName(name=alias.email)
@ -191,8 +191,15 @@ def get_alias_infos_with_pagination_v3(
|
|||||||
q = q.order_by(Alias.email.desc())
|
q = q.order_by(Alias.email.desc())
|
||||||
else:
|
else:
|
||||||
# default sorting
|
# default sorting
|
||||||
|
latest_activity = case(
|
||||||
|
[
|
||||||
|
(Alias.created_at > EmailLog.created_at, Alias.created_at),
|
||||||
|
(Alias.created_at < EmailLog.created_at, EmailLog.created_at),
|
||||||
|
],
|
||||||
|
else_=Alias.created_at,
|
||||||
|
)
|
||||||
q = q.order_by(Alias.pinned.desc())
|
q = q.order_by(Alias.pinned.desc())
|
||||||
q = q.order_by(func.greatest(Alias.created_at, EmailLog.created_at).desc())
|
q = q.order_by(latest_activity.desc())
|
||||||
|
|
||||||
q = q.limit(page_limit).offset(page_id * page_size)
|
q = q.limit(page_limit).offset(page_id * page_size)
|
||||||
|
|
||||||
|
@ -1,13 +1,9 @@
|
|||||||
from typing import Optional
|
|
||||||
|
|
||||||
from deprecated import deprecated
|
from deprecated import deprecated
|
||||||
from flask import g
|
from flask import g
|
||||||
from flask import jsonify
|
from flask import jsonify
|
||||||
from flask import request
|
from flask import request
|
||||||
|
|
||||||
from app import alias_utils
|
from app import alias_utils
|
||||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
|
||||||
from app.alias_mailbox_utils import set_mailboxes_for_alias
|
|
||||||
from app.api.base import api_bp, require_api_auth
|
from app.api.base import api_bp, require_api_auth
|
||||||
from app.api.serializer import (
|
from app.api.serializer import (
|
||||||
AliasInfo,
|
AliasInfo,
|
||||||
@ -30,7 +26,7 @@ from app.errors import (
|
|||||||
)
|
)
|
||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import Alias, Contact, Mailbox, AliasDeleteReason
|
from app.models import Alias, Contact, Mailbox, AliasMailbox, AliasDeleteReason
|
||||||
|
|
||||||
|
|
||||||
@deprecated
|
@deprecated
|
||||||
@ -189,11 +185,7 @@ def toggle_alias(alias_id):
|
|||||||
if not alias or alias.user_id != user.id:
|
if not alias or alias.user_id != user.id:
|
||||||
return jsonify(error="Forbidden"), 403
|
return jsonify(error="Forbidden"), 403
|
||||||
|
|
||||||
alias_utils.change_alias_status(
|
alias_utils.change_alias_status(alias, enabled=not alias.enabled)
|
||||||
alias,
|
|
||||||
enabled=not alias.enabled,
|
|
||||||
message=f"Set enabled={not alias.enabled} via API",
|
|
||||||
)
|
|
||||||
LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
|
LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
@ -280,12 +272,10 @@ def update_alias(alias_id):
|
|||||||
if not alias or alias.user_id != user.id:
|
if not alias or alias.user_id != user.id:
|
||||||
return jsonify(error="Forbidden"), 403
|
return jsonify(error="Forbidden"), 403
|
||||||
|
|
||||||
changed_fields = []
|
|
||||||
changed = False
|
changed = False
|
||||||
if "note" in data:
|
if "note" in data:
|
||||||
new_note = data.get("note")
|
new_note = data.get("note")
|
||||||
alias.note = new_note
|
alias.note = new_note
|
||||||
changed_fields.append("note")
|
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if "mailbox_id" in data:
|
if "mailbox_id" in data:
|
||||||
@ -295,22 +285,35 @@ def update_alias(alias_id):
|
|||||||
return jsonify(error="Forbidden"), 400
|
return jsonify(error="Forbidden"), 400
|
||||||
|
|
||||||
alias.mailbox_id = mailbox_id
|
alias.mailbox_id = mailbox_id
|
||||||
changed_fields.append(f"mailbox_id ({mailbox_id})")
|
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if "mailbox_ids" in data:
|
if "mailbox_ids" in data:
|
||||||
try:
|
|
||||||
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
|
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
|
||||||
except ValueError:
|
mailboxes: [Mailbox] = []
|
||||||
return jsonify(error="Invalid mailbox_id"), 400
|
|
||||||
err = set_mailboxes_for_alias(
|
# check if all mailboxes belong to user
|
||||||
user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
|
for mailbox_id in mailbox_ids:
|
||||||
)
|
mailbox = Mailbox.get(mailbox_id)
|
||||||
if err:
|
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
|
||||||
return jsonify(error=err.value), 400
|
return jsonify(error="Forbidden"), 400
|
||||||
|
mailboxes.append(mailbox)
|
||||||
|
|
||||||
|
if not mailboxes:
|
||||||
|
return jsonify(error="Must choose at least one mailbox"), 400
|
||||||
|
|
||||||
|
# <<< update alias mailboxes >>>
|
||||||
|
# first remove all existing alias-mailboxes links
|
||||||
|
AliasMailbox.filter_by(alias_id=alias.id).delete()
|
||||||
|
Session.flush()
|
||||||
|
|
||||||
|
# then add all new mailboxes
|
||||||
|
for i, mailbox in enumerate(mailboxes):
|
||||||
|
if i == 0:
|
||||||
|
alias.mailbox_id = mailboxes[0].id
|
||||||
|
else:
|
||||||
|
AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
|
||||||
|
# <<< END update alias mailboxes >>>
|
||||||
|
|
||||||
mailbox_ids_string = ",".join(map(str, mailbox_ids))
|
|
||||||
changed_fields.append(f"mailbox_ids ({mailbox_ids_string})")
|
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if "name" in data:
|
if "name" in data:
|
||||||
@ -322,26 +325,17 @@ def update_alias(alias_id):
|
|||||||
if new_name:
|
if new_name:
|
||||||
new_name = new_name.replace("\n", "")
|
new_name = new_name.replace("\n", "")
|
||||||
alias.name = new_name
|
alias.name = new_name
|
||||||
changed_fields.append("name")
|
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if "disable_pgp" in data:
|
if "disable_pgp" in data:
|
||||||
alias.disable_pgp = data.get("disable_pgp")
|
alias.disable_pgp = data.get("disable_pgp")
|
||||||
changed_fields.append("disable_pgp")
|
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if "pinned" in data:
|
if "pinned" in data:
|
||||||
alias.pinned = data.get("pinned")
|
alias.pinned = data.get("pinned")
|
||||||
changed_fields.append("pinned")
|
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if changed:
|
if changed:
|
||||||
changed_fields_string = ",".join(changed_fields)
|
|
||||||
emit_alias_audit_log(
|
|
||||||
alias,
|
|
||||||
AliasAuditLogAction.UpdateAlias,
|
|
||||||
f"Alias fields updated ({changed_fields_string})",
|
|
||||||
)
|
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
return jsonify(ok=True), 200
|
return jsonify(ok=True), 200
|
||||||
@ -422,14 +416,15 @@ def create_contact_route(alias_id):
|
|||||||
if not data:
|
if not data:
|
||||||
return jsonify(error="request body cannot be empty"), 400
|
return jsonify(error="request body cannot be empty"), 400
|
||||||
|
|
||||||
alias: Optional[Alias] = Alias.get_by(id=alias_id, user_id=g.user.id)
|
alias: Alias = Alias.get(alias_id)
|
||||||
if not alias:
|
|
||||||
|
if alias.user_id != g.user.id:
|
||||||
return jsonify(error="Forbidden"), 403
|
return jsonify(error="Forbidden"), 403
|
||||||
|
|
||||||
contact_address = data.get("contact")
|
contact_address = data.get("contact")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
contact = create_contact(alias, contact_address)
|
contact = create_contact(g.user, alias, contact_address)
|
||||||
except ErrContactErrorUpgradeNeeded as err:
|
except ErrContactErrorUpgradeNeeded as err:
|
||||||
return jsonify(error=err.error_for_user()), 403
|
return jsonify(error=err.error_for_user()), 403
|
||||||
except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
|
except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
|
||||||
@ -451,16 +446,11 @@ def delete_contact(contact_id):
|
|||||||
200
|
200
|
||||||
"""
|
"""
|
||||||
user = g.user
|
user = g.user
|
||||||
contact: Optional[Contact] = Contact.get(contact_id)
|
contact = Contact.get(contact_id)
|
||||||
|
|
||||||
if not contact or contact.alias.user_id != user.id:
|
if not contact or contact.alias.user_id != user.id:
|
||||||
return jsonify(error="Forbidden"), 403
|
return jsonify(error="Forbidden"), 403
|
||||||
|
|
||||||
emit_alias_audit_log(
|
|
||||||
alias=contact.alias,
|
|
||||||
action=AliasAuditLogAction.DeleteContact,
|
|
||||||
message=f"Deleted contact {contact_id} ({contact.email})",
|
|
||||||
)
|
|
||||||
Contact.delete(contact_id)
|
Contact.delete(contact_id)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
@ -478,17 +468,12 @@ def toggle_contact(contact_id):
|
|||||||
200
|
200
|
||||||
"""
|
"""
|
||||||
user = g.user
|
user = g.user
|
||||||
contact: Optional[Contact] = Contact.get(contact_id)
|
contact = Contact.get(contact_id)
|
||||||
|
|
||||||
if not contact or contact.alias.user_id != user.id:
|
if not contact or contact.alias.user_id != user.id:
|
||||||
return jsonify(error="Forbidden"), 403
|
return jsonify(error="Forbidden"), 403
|
||||||
|
|
||||||
contact.block_forward = not contact.block_forward
|
contact.block_forward = not contact.block_forward
|
||||||
emit_alias_audit_log(
|
|
||||||
alias=contact.alias,
|
|
||||||
action=AliasAuditLogAction.UpdateContact,
|
|
||||||
message=f"Set contact state {contact.id} {contact.email} -> {contact.website_email} to blocked {contact.block_forward}",
|
|
||||||
)
|
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
return jsonify(block_forward=contact.block_forward), 200
|
return jsonify(block_forward=contact.block_forward), 200
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
import secrets
|
import secrets
|
||||||
import string
|
import string
|
||||||
|
|
||||||
|
import facebook
|
||||||
import google.oauth2.credentials
|
import google.oauth2.credentials
|
||||||
import googleapiclient.discovery
|
import googleapiclient.discovery
|
||||||
from flask import jsonify, request
|
from flask import jsonify, request
|
||||||
@ -22,7 +23,6 @@ from app.events.auth_event import LoginEvent, RegisterEvent
|
|||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import User, ApiKey, SocialAuth, AccountActivation
|
from app.models import User, ApiKey, SocialAuth, AccountActivation
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
from app.utils import sanitize_email, canonicalize_email
|
from app.utils import sanitize_email, canonicalize_email
|
||||||
|
|
||||||
|
|
||||||
@ -52,12 +52,8 @@ def auth_login():
|
|||||||
password = data.get("password")
|
password = data.get("password")
|
||||||
device = data.get("device")
|
device = data.get("device")
|
||||||
|
|
||||||
email = data.get("email")
|
email = sanitize_email(data.get("email"))
|
||||||
if not email:
|
canonical_email = canonicalize_email(data.get("email"))
|
||||||
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
|
|
||||||
return jsonify(error="Email or password incorrect"), 400
|
|
||||||
email = sanitize_email(email)
|
|
||||||
canonical_email = canonicalize_email(email)
|
|
||||||
|
|
||||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||||
|
|
||||||
@ -187,11 +183,6 @@ def auth_activate():
|
|||||||
|
|
||||||
LOG.d("activate user %s", user)
|
LOG.d("activate user %s", user)
|
||||||
user.activated = True
|
user.activated = True
|
||||||
emit_user_audit_log(
|
|
||||||
user=user,
|
|
||||||
action=UserAuditLogAction.ActivateUser,
|
|
||||||
message=f"User has been activated: {user.email}",
|
|
||||||
)
|
|
||||||
AccountActivation.delete(account_activation.id)
|
AccountActivation.delete(account_activation.id)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
@ -260,8 +251,6 @@ def auth_facebook():
|
|||||||
}
|
}
|
||||||
|
|
||||||
"""
|
"""
|
||||||
import facebook
|
|
||||||
|
|
||||||
data = request.get_json()
|
data = request.get_json()
|
||||||
if not data:
|
if not data:
|
||||||
return jsonify(error="request body cannot be empty"), 400
|
return jsonify(error="request body cannot be empty"), 400
|
||||||
|
@ -2,10 +2,8 @@ from flask import g, request
|
|||||||
from flask import jsonify
|
from flask import jsonify
|
||||||
|
|
||||||
from app.api.base import api_bp, require_api_auth
|
from app.api.base import api_bp, require_api_auth
|
||||||
from app.custom_domain_utils import set_custom_domain_mailboxes
|
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.log import LOG
|
from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
|
||||||
from app.models import CustomDomain, DomainDeletedAlias
|
|
||||||
|
|
||||||
|
|
||||||
def custom_domain_to_dict(custom_domain: CustomDomain):
|
def custom_domain_to_dict(custom_domain: CustomDomain):
|
||||||
@ -102,14 +100,23 @@ def update_custom_domain(custom_domain_id):
|
|||||||
|
|
||||||
if "mailbox_ids" in data:
|
if "mailbox_ids" in data:
|
||||||
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
|
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
|
||||||
result = set_custom_domain_mailboxes(user.id, custom_domain, mailbox_ids)
|
if mailbox_ids:
|
||||||
if result.success:
|
# check if mailbox is not tempered with
|
||||||
changed = True
|
mailboxes = []
|
||||||
else:
|
for mailbox_id in mailbox_ids:
|
||||||
LOG.info(
|
mailbox = Mailbox.get(mailbox_id)
|
||||||
f"Prevented from updating mailboxes [custom_domain_id={custom_domain.id}]: {result.reason.value}"
|
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
|
||||||
)
|
|
||||||
return jsonify(error="Forbidden"), 400
|
return jsonify(error="Forbidden"), 400
|
||||||
|
mailboxes.append(mailbox)
|
||||||
|
|
||||||
|
# first remove all existing domain-mailboxes links
|
||||||
|
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
|
||||||
|
Session.flush()
|
||||||
|
|
||||||
|
for mailbox in mailboxes:
|
||||||
|
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
|
||||||
|
|
||||||
|
changed = True
|
||||||
|
|
||||||
if changed:
|
if changed:
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
@ -6,7 +6,12 @@ from flask import request
|
|||||||
|
|
||||||
from app import mailbox_utils
|
from app import mailbox_utils
|
||||||
from app.api.base import api_bp, require_api_auth
|
from app.api.base import api_bp, require_api_auth
|
||||||
|
from app.dashboard.views.mailbox_detail import verify_mailbox_change
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
|
from app.email_utils import (
|
||||||
|
mailbox_already_used,
|
||||||
|
email_can_be_used_as_mailbox,
|
||||||
|
)
|
||||||
from app.models import Mailbox
|
from app.models import Mailbox
|
||||||
from app.utils import sanitize_email
|
from app.utils import sanitize_email
|
||||||
|
|
||||||
@ -33,11 +38,7 @@ def create_mailbox():
|
|||||||
the new mailbox dict
|
the new mailbox dict
|
||||||
"""
|
"""
|
||||||
user = g.user
|
user = g.user
|
||||||
email = request.get_json().get("email")
|
mailbox_email = sanitize_email(request.get_json().get("email"))
|
||||||
if not email:
|
|
||||||
return jsonify(error="Invalid email"), 400
|
|
||||||
|
|
||||||
mailbox_email = sanitize_email(email)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
|
new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
|
||||||
@ -117,10 +118,20 @@ def update_mailbox(mailbox_id):
|
|||||||
|
|
||||||
if "email" in data:
|
if "email" in data:
|
||||||
new_email = sanitize_email(data.get("email"))
|
new_email = sanitize_email(data.get("email"))
|
||||||
|
|
||||||
|
if mailbox_already_used(new_email, user):
|
||||||
|
return jsonify(error=f"{new_email} already used"), 400
|
||||||
|
elif not email_can_be_used_as_mailbox(new_email):
|
||||||
|
return (
|
||||||
|
jsonify(
|
||||||
|
error=f"{new_email} cannot be used. Please note a mailbox cannot "
|
||||||
|
f"be a disposable email address"
|
||||||
|
),
|
||||||
|
400,
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
mailbox_utils.request_mailbox_email_change(user, mailbox, new_email)
|
verify_mailbox_change(user, mailbox, new_email)
|
||||||
except mailbox_utils.MailboxError as e:
|
|
||||||
return jsonify(error=e.msg), 400
|
|
||||||
except SMTPRecipientsRefused:
|
except SMTPRecipientsRefused:
|
||||||
return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
|
return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
|
||||||
else:
|
else:
|
||||||
@ -130,7 +141,7 @@ def update_mailbox(mailbox_id):
|
|||||||
if "cancel_email_change" in data:
|
if "cancel_email_change" in data:
|
||||||
cancel_email_change = data.get("cancel_email_change")
|
cancel_email_change = data.get("cancel_email_change")
|
||||||
if cancel_email_change:
|
if cancel_email_change:
|
||||||
mailbox_utils.cancel_email_change(mailbox.id, user)
|
mailbox.new_email = None
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if changed:
|
if changed:
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
from email_validator import EmailNotValidError
|
|
||||||
from flask import g
|
from flask import g
|
||||||
from flask import jsonify, request
|
from flask import jsonify, request
|
||||||
|
|
||||||
@ -62,17 +61,8 @@ def new_custom_alias_v2():
|
|||||||
if not data:
|
if not data:
|
||||||
return jsonify(error="request body cannot be empty"), 400
|
return jsonify(error="request body cannot be empty"), 400
|
||||||
|
|
||||||
alias_prefix = data.get("alias_prefix", "")
|
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
|
||||||
if not isinstance(alias_prefix, str) or not alias_prefix:
|
signed_suffix = data.get("signed_suffix", "").strip()
|
||||||
return jsonify(error="invalid value for alias_prefix"), 400
|
|
||||||
|
|
||||||
alias_prefix = alias_prefix.strip().lower().replace(" ", "")
|
|
||||||
signed_suffix = data.get("signed_suffix", "")
|
|
||||||
if not isinstance(signed_suffix, str) or not signed_suffix:
|
|
||||||
return jsonify(error="invalid value for signed_suffix"), 400
|
|
||||||
|
|
||||||
signed_suffix = signed_suffix.strip()
|
|
||||||
|
|
||||||
note = data.get("note")
|
note = data.get("note")
|
||||||
alias_prefix = convert_to_id(alias_prefix)
|
alias_prefix = convert_to_id(alias_prefix)
|
||||||
|
|
||||||
@ -103,15 +93,12 @@ def new_custom_alias_v2():
|
|||||||
400,
|
400,
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
|
||||||
alias = Alias.create(
|
alias = Alias.create(
|
||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
email=full_alias,
|
email=full_alias,
|
||||||
mailbox_id=user.default_mailbox_id,
|
mailbox_id=user.default_mailbox_id,
|
||||||
note=note,
|
note=note,
|
||||||
)
|
)
|
||||||
except EmailNotValidError:
|
|
||||||
return jsonify(error="Email is not valid"), 400
|
|
||||||
|
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
@ -166,17 +153,8 @@ def new_custom_alias_v3():
|
|||||||
if not isinstance(data, dict):
|
if not isinstance(data, dict):
|
||||||
return jsonify(error="request body does not follow the required format"), 400
|
return jsonify(error="request body does not follow the required format"), 400
|
||||||
|
|
||||||
alias_prefix_data = data.get("alias_prefix", "") or ""
|
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
|
||||||
|
|
||||||
if not isinstance(alias_prefix_data, str):
|
|
||||||
return jsonify(error="request body does not follow the required format"), 400
|
|
||||||
|
|
||||||
alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
|
|
||||||
signed_suffix = data.get("signed_suffix", "") or ""
|
signed_suffix = data.get("signed_suffix", "") or ""
|
||||||
|
|
||||||
if not isinstance(signed_suffix, str):
|
|
||||||
return jsonify(error="request body does not follow the required format"), 400
|
|
||||||
|
|
||||||
signed_suffix = signed_suffix.strip()
|
signed_suffix = signed_suffix.strip()
|
||||||
|
|
||||||
mailbox_ids = data.get("mailbox_ids")
|
mailbox_ids = data.get("mailbox_ids")
|
||||||
|
@ -12,7 +12,7 @@ from app.models import (
|
|||||||
SenderFormatEnum,
|
SenderFormatEnum,
|
||||||
AliasSuffixEnum,
|
AliasSuffixEnum,
|
||||||
)
|
)
|
||||||
from app.proton.proton_unlink import perform_proton_account_unlink
|
from app.proton.utils import perform_proton_account_unlink
|
||||||
|
|
||||||
|
|
||||||
def setting_to_dict(user: User):
|
def setting_to_dict(user: User):
|
||||||
@ -144,6 +144,5 @@ def get_available_domains_for_random_alias_v2():
|
|||||||
@require_api_auth
|
@require_api_auth
|
||||||
def unlink_proton_account():
|
def unlink_proton_account():
|
||||||
user = g.user
|
user = g.user
|
||||||
if not perform_proton_account_unlink(user):
|
perform_proton_account_unlink(user)
|
||||||
return jsonify(error="The account cannot be unlinked"), 400
|
|
||||||
return jsonify({"ok": True})
|
return jsonify({"ok": True})
|
||||||
|
@ -2,11 +2,10 @@ from flask import jsonify, g
|
|||||||
from sqlalchemy_utils.types.arrow import arrow
|
from sqlalchemy_utils.types.arrow import arrow
|
||||||
|
|
||||||
from app.api.base import api_bp, require_api_sudo, require_api_auth
|
from app.api.base import api_bp, require_api_sudo, require_api_auth
|
||||||
from app.constants import JobType
|
from app import config
|
||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import Job, ApiToCookieToken
|
from app.models import Job, ApiToCookieToken
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
|
|
||||||
|
|
||||||
@api_bp.route("/user", methods=["DELETE"])
|
@api_bp.route("/user", methods=["DELETE"])
|
||||||
@ -17,14 +16,9 @@ def delete_user():
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
# Schedule delete account job
|
# Schedule delete account job
|
||||||
emit_user_audit_log(
|
|
||||||
user=g.user,
|
|
||||||
action=UserAuditLogAction.UserMarkedForDeletion,
|
|
||||||
message=f"Marked user {g.user.id} ({g.user.email}) for deletion from API",
|
|
||||||
)
|
|
||||||
LOG.w("schedule delete account job for %s", g.user)
|
LOG.w("schedule delete account job for %s", g.user)
|
||||||
Job.create(
|
Job.create(
|
||||||
name=JobType.DELETE_ACCOUNT.value,
|
name=config.JOB_DELETE_ACCOUNT,
|
||||||
payload={"user_id": g.user.id},
|
payload={"user_id": g.user.id},
|
||||||
run_at=arrow.now(),
|
run_at=arrow.now(),
|
||||||
commit=True,
|
commit=True,
|
||||||
@ -44,8 +38,6 @@ def get_api_session_token():
|
|||||||
token: "asdli3ldq39h9hd3",
|
token: "asdli3ldq39h9hd3",
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
if not g.api_key:
|
|
||||||
return jsonify(ok=False), 401
|
|
||||||
token = ApiToCookieToken.create(
|
token = ApiToCookieToken.create(
|
||||||
user=g.user,
|
user=g.user,
|
||||||
api_key_id=g.api_key.id,
|
api_key_id=g.api_key.id,
|
||||||
|
@ -12,7 +12,7 @@ from app.dashboard.views.index import get_stats
|
|||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.image_validation import detect_image_format, ImageFormat
|
from app.image_validation import detect_image_format, ImageFormat
|
||||||
from app.models import ApiKey, File, PartnerUser, User
|
from app.models import ApiKey, File, PartnerUser, User
|
||||||
from app.proton.proton_partner import get_proton_partner
|
from app.proton.utils import get_proton_partner
|
||||||
from app.session import logout_session
|
from app.session import logout_session
|
||||||
from app.utils import random_string
|
from app.utils import random_string
|
||||||
|
|
||||||
@ -87,7 +87,7 @@ def update_user_info():
|
|||||||
File.delete(file.id)
|
File.delete(file.id)
|
||||||
s3.delete(file.path)
|
s3.delete(file.path)
|
||||||
Session.flush()
|
Session.flush()
|
||||||
if data["profile_picture"] is not None:
|
else:
|
||||||
raw_data = base64.decodebytes(data["profile_picture"].encode())
|
raw_data = base64.decodebytes(data["profile_picture"].encode())
|
||||||
if detect_image_format(raw_data) == ImageFormat.Unknown:
|
if detect_image_format(raw_data) == ImageFormat.Unknown:
|
||||||
return jsonify(error="Unsupported image format"), 400
|
return jsonify(error="Unsupported image format"), 400
|
||||||
|
@ -7,7 +7,6 @@ from app.db import Session
|
|||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import ActivationCode
|
from app.models import ActivationCode
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
from app.utils import sanitize_next_url
|
from app.utils import sanitize_next_url
|
||||||
|
|
||||||
|
|
||||||
@ -48,11 +47,6 @@ def activate():
|
|||||||
|
|
||||||
user = activation_code.user
|
user = activation_code.user
|
||||||
user.activated = True
|
user.activated = True
|
||||||
emit_user_audit_log(
|
|
||||||
user=user,
|
|
||||||
action=UserAuditLogAction.ActivateUser,
|
|
||||||
message=f"User has been activated: {user.email}",
|
|
||||||
)
|
|
||||||
login_user(user)
|
login_user(user)
|
||||||
|
|
||||||
# activation code is to be used only once
|
# activation code is to be used only once
|
||||||
|
@ -10,7 +10,6 @@ from app.events.auth_event import LoginEvent
|
|||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import User
|
from app.models import User
|
||||||
from app.pw_models import PasswordOracle
|
|
||||||
from app.utils import sanitize_email, sanitize_next_url, canonicalize_email
|
from app.utils import sanitize_email, sanitize_next_url, canonicalize_email
|
||||||
|
|
||||||
|
|
||||||
@ -44,13 +43,6 @@ def login():
|
|||||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||||
|
|
||||||
if not user or not user.check_password(form.password.data):
|
if not user or not user.check_password(form.password.data):
|
||||||
if not user:
|
|
||||||
# Do the hash to avoid timing attacks nevertheless
|
|
||||||
dummy_pw = PasswordOracle()
|
|
||||||
dummy_pw.password = (
|
|
||||||
"$2b$12$ZWqpL73h4rGNfLkJohAFAu0isqSw/bX9p/tzpbWRz/To5FAftaW8u"
|
|
||||||
)
|
|
||||||
dummy_pw.check_password(form.password.data)
|
|
||||||
# Trigger rate limiter
|
# Trigger rate limiter
|
||||||
g.deduct_limit = True
|
g.deduct_limit = True
|
||||||
form.password.data = None
|
form.password.data = None
|
||||||
|
@ -23,7 +23,7 @@ from app.proton.proton_callback_handler import (
|
|||||||
ProtonCallbackHandler,
|
ProtonCallbackHandler,
|
||||||
Action,
|
Action,
|
||||||
)
|
)
|
||||||
from app.proton.proton_partner import get_proton_partner
|
from app.proton.utils import get_proton_partner
|
||||||
from app.utils import sanitize_next_url, sanitize_scheme
|
from app.utils import sanitize_next_url, sanitize_scheme
|
||||||
|
|
||||||
_authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"
|
_authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"
|
||||||
|
@ -9,7 +9,6 @@ from app.auth.views.login_utils import after_login
|
|||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.models import ResetPasswordCode
|
from app.models import ResetPasswordCode
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
|
|
||||||
|
|
||||||
class ResetPasswordForm(FlaskForm):
|
class ResetPasswordForm(FlaskForm):
|
||||||
@ -60,11 +59,6 @@ def reset_password():
|
|||||||
|
|
||||||
# this can be served to activate user too
|
# this can be served to activate user too
|
||||||
user.activated = True
|
user.activated = True
|
||||||
emit_user_audit_log(
|
|
||||||
user=user,
|
|
||||||
action=UserAuditLogAction.ResetPassword,
|
|
||||||
message="User has reset their password",
|
|
||||||
)
|
|
||||||
|
|
||||||
# remove all reset password codes
|
# remove all reset password codes
|
||||||
ResetPasswordCode.filter_by(user_id=user.id).delete()
|
ResetPasswordCode.filter_by(user_id=user.id).delete()
|
||||||
|
@ -1,3 +1,2 @@
|
|||||||
SHA1 = "dev"
|
SHA1 = "dev"
|
||||||
BUILD_TIME = "1652365083"
|
BUILD_TIME = "1652365083"
|
||||||
VERSION = SHA1
|
|
||||||
|
@ -35,44 +35,6 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
|
|||||||
return literal_eval(value)
|
return literal_eval(value)
|
||||||
|
|
||||||
|
|
||||||
def get_env_dict(env_var: str) -> dict[str, str]:
|
|
||||||
"""
|
|
||||||
Get an env variable and convert it into a python dictionary with keys and values as strings.
|
|
||||||
Args:
|
|
||||||
env_var (str): env var, example: SL_DB
|
|
||||||
|
|
||||||
Syntax is: key1=value1;key2=value2
|
|
||||||
Components separated by ;
|
|
||||||
key and value separated by =
|
|
||||||
"""
|
|
||||||
value = os.getenv(env_var)
|
|
||||||
if not value:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
components = value.split(";")
|
|
||||||
result = {}
|
|
||||||
for component in components:
|
|
||||||
if component == "":
|
|
||||||
continue
|
|
||||||
parts = component.split("=")
|
|
||||||
if len(parts) != 2:
|
|
||||||
raise Exception(f"Invalid config for env var {env_var}")
|
|
||||||
result[parts[0].strip()] = parts[1].strip()
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def get_env_csv(env_var: str, default: Optional[str]) -> list[str]:
|
|
||||||
"""
|
|
||||||
Get an env variable and convert it into a list of strings separated by,
|
|
||||||
Syntax is: val1,val2
|
|
||||||
"""
|
|
||||||
value = os.getenv(env_var, default)
|
|
||||||
if not value:
|
|
||||||
return []
|
|
||||||
return [field.strip() for field in value.split(",") if field.strip()]
|
|
||||||
|
|
||||||
|
|
||||||
config_file = os.environ.get("CONFIG")
|
config_file = os.environ.get("CONFIG")
|
||||||
if config_file:
|
if config_file:
|
||||||
config_file = get_abs_path(config_file)
|
config_file = get_abs_path(config_file)
|
||||||
@ -182,14 +144,6 @@ FIRST_ALIAS_DOMAIN = os.environ.get("FIRST_ALIAS_DOMAIN") or EMAIL_DOMAIN
|
|||||||
# e.g. [(10, "mx1.hostname."), (10, "mx2.hostname.")]
|
# e.g. [(10, "mx1.hostname."), (10, "mx2.hostname.")]
|
||||||
EMAIL_SERVERS_WITH_PRIORITY = sl_getenv("EMAIL_SERVERS_WITH_PRIORITY")
|
EMAIL_SERVERS_WITH_PRIORITY = sl_getenv("EMAIL_SERVERS_WITH_PRIORITY")
|
||||||
|
|
||||||
PROTON_MX_SERVERS = get_env_csv(
|
|
||||||
"PROTON_MX_SERVERS", "mail.protonmail.ch., mailsec.protonmail.ch."
|
|
||||||
)
|
|
||||||
|
|
||||||
PROTON_EMAIL_DOMAINS = get_env_csv(
|
|
||||||
"PROTON_EMAIL_DOMAINS", "proton.me, protonmail.com, protonmail.ch, proton.ch, pm.me"
|
|
||||||
)
|
|
||||||
|
|
||||||
# disable the alias suffix, i.e. the ".random_word" part
|
# disable the alias suffix, i.e. the ".random_word" part
|
||||||
DISABLE_ALIAS_SUFFIX = "DISABLE_ALIAS_SUFFIX" in os.environ
|
DISABLE_ALIAS_SUFFIX = "DISABLE_ALIAS_SUFFIX" in os.environ
|
||||||
|
|
||||||
@ -316,6 +270,19 @@ MFA_USER_ID = "mfa_user_id"
|
|||||||
FLASK_PROFILER_PATH = os.environ.get("FLASK_PROFILER_PATH")
|
FLASK_PROFILER_PATH = os.environ.get("FLASK_PROFILER_PATH")
|
||||||
FLASK_PROFILER_PASSWORD = os.environ.get("FLASK_PROFILER_PASSWORD")
|
FLASK_PROFILER_PASSWORD = os.environ.get("FLASK_PROFILER_PASSWORD")
|
||||||
|
|
||||||
|
# Job names
|
||||||
|
JOB_ONBOARDING_1 = "onboarding-1"
|
||||||
|
JOB_ONBOARDING_2 = "onboarding-2"
|
||||||
|
JOB_ONBOARDING_3 = "onboarding-3"
|
||||||
|
JOB_ONBOARDING_4 = "onboarding-4"
|
||||||
|
JOB_BATCH_IMPORT = "batch-import"
|
||||||
|
JOB_DELETE_ACCOUNT = "delete-account"
|
||||||
|
JOB_DELETE_MAILBOX = "delete-mailbox"
|
||||||
|
JOB_DELETE_DOMAIN = "delete-domain"
|
||||||
|
JOB_SEND_USER_REPORT = "send-user-report"
|
||||||
|
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
|
||||||
|
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
|
||||||
|
|
||||||
# for pagination
|
# for pagination
|
||||||
PAGE_LIMIT = 20
|
PAGE_LIMIT = 20
|
||||||
|
|
||||||
@ -607,6 +574,7 @@ SKIP_MX_LOOKUP_ON_CHECK = False
|
|||||||
|
|
||||||
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
||||||
|
|
||||||
|
SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
|
||||||
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
||||||
|
|
||||||
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
||||||
@ -641,32 +609,3 @@ EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_
|
|||||||
# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
|
# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
|
||||||
# It defaults to the regular DB_URI in case it's needed
|
# It defaults to the regular DB_URI in case it's needed
|
||||||
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)
|
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)
|
||||||
|
|
||||||
|
|
||||||
def read_partner_dict(var: str) -> dict[int, str]:
|
|
||||||
partner_value = get_env_dict(var)
|
|
||||||
if len(partner_value) == 0:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
res: dict[int, str] = {}
|
|
||||||
for partner_id in partner_value.keys():
|
|
||||||
try:
|
|
||||||
partner_id_int = int(partner_id.strip())
|
|
||||||
res[partner_id_int] = partner_value[partner_id]
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
|
|
||||||
"PARTNER_DNS_CUSTOM_DOMAINS"
|
|
||||||
)
|
|
||||||
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
|
|
||||||
"PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
|
|
||||||
)
|
|
||||||
|
|
||||||
MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
|
|
||||||
"MAILBOX_VERIFICATION_OVERRIDE_CODE", None
|
|
||||||
)
|
|
||||||
|
|
||||||
AUDIT_LOG_MAX_DAYS = int(os.environ.get("AUDIT_LOG_MAX_DAYS", 30))
|
|
||||||
|
@ -1,18 +1 @@
|
|||||||
import enum
|
|
||||||
|
|
||||||
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
|
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
|
||||||
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
|
|
||||||
|
|
||||||
|
|
||||||
class JobType(enum.Enum):
|
|
||||||
ONBOARDING_1 = "onboarding-1"
|
|
||||||
ONBOARDING_2 = "onboarding-2"
|
|
||||||
ONBOARDING_4 = "onboarding-4"
|
|
||||||
BATCH_IMPORT = "batch-import"
|
|
||||||
DELETE_ACCOUNT = "delete-account"
|
|
||||||
DELETE_MAILBOX = "delete-mailbox"
|
|
||||||
DELETE_DOMAIN = "delete-domain"
|
|
||||||
SEND_USER_REPORT = "send-user-report"
|
|
||||||
SEND_PROTON_WELCOME_1 = "proton-welcome-1"
|
|
||||||
SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
|
|
||||||
SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"
|
|
||||||
|
@ -1,138 +0,0 @@
|
|||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from sqlalchemy.exc import IntegrityError
|
|
||||||
|
|
||||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
|
||||||
from app.db import Session
|
|
||||||
from app.email_utils import generate_reply_email, parse_full_address
|
|
||||||
from app.email_validation import is_valid_email
|
|
||||||
from app.log import LOG
|
|
||||||
from app.models import Contact, Alias
|
|
||||||
from app.utils import sanitize_email
|
|
||||||
|
|
||||||
|
|
||||||
class ContactCreateError(Enum):
|
|
||||||
InvalidEmail = "Invalid email"
|
|
||||||
NotAllowed = "Your plan does not allow to create contacts"
|
|
||||||
Unknown = "Unknown error when trying to create contact"
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ContactCreateResult:
|
|
||||||
contact: Optional[Contact]
|
|
||||||
created: bool
|
|
||||||
error: Optional[ContactCreateError]
|
|
||||||
|
|
||||||
|
|
||||||
def __update_contact_if_needed(
|
|
||||||
contact: Contact, name: Optional[str], mail_from: Optional[str]
|
|
||||||
) -> ContactCreateResult:
|
|
||||||
if name and contact.name != name:
|
|
||||||
LOG.d(f"Setting {contact} name to {name}")
|
|
||||||
contact.name = name
|
|
||||||
Session.commit()
|
|
||||||
if mail_from and contact.mail_from is None:
|
|
||||||
LOG.d(f"Setting {contact} mail_from to {mail_from}")
|
|
||||||
contact.mail_from = mail_from
|
|
||||||
Session.commit()
|
|
||||||
return ContactCreateResult(contact, created=False, error=None)
|
|
||||||
|
|
||||||
|
|
||||||
def create_contact(
|
|
||||||
email: str,
|
|
||||||
alias: Alias,
|
|
||||||
name: Optional[str] = None,
|
|
||||||
mail_from: Optional[str] = None,
|
|
||||||
allow_empty_email: bool = False,
|
|
||||||
automatic_created: bool = False,
|
|
||||||
from_partner: bool = False,
|
|
||||||
) -> ContactCreateResult:
|
|
||||||
# If user cannot create contacts, they still need to be created when receiving an email for an alias
|
|
||||||
if not automatic_created and not alias.user.can_create_contacts():
|
|
||||||
return ContactCreateResult(
|
|
||||||
None, created=False, error=ContactCreateError.NotAllowed
|
|
||||||
)
|
|
||||||
# Parse emails with form 'name <email>'
|
|
||||||
try:
|
|
||||||
email_name, email = parse_full_address(email)
|
|
||||||
except ValueError:
|
|
||||||
email = ""
|
|
||||||
email_name = ""
|
|
||||||
# If no name is explicitly given try to get it from the parsed email
|
|
||||||
if name is None:
|
|
||||||
name = email_name[: Contact.MAX_NAME_LENGTH]
|
|
||||||
else:
|
|
||||||
name = name[: Contact.MAX_NAME_LENGTH]
|
|
||||||
# If still no name is there, make sure the name is None instead of empty string
|
|
||||||
if not name:
|
|
||||||
name = None
|
|
||||||
if name is not None and "\x00" in name:
|
|
||||||
LOG.w("Cannot use contact name because has \\x00")
|
|
||||||
name = ""
|
|
||||||
# Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
|
|
||||||
email = sanitize_email(email, not_lower=True)
|
|
||||||
if not is_valid_email(email):
|
|
||||||
LOG.w(f"invalid contact email {email}")
|
|
||||||
if not allow_empty_email:
|
|
||||||
return ContactCreateResult(
|
|
||||||
None, created=False, error=ContactCreateError.InvalidEmail
|
|
||||||
)
|
|
||||||
LOG.d("Create a contact with invalid email for %s", alias)
|
|
||||||
# either reuse a contact with empty email or create a new contact with empty email
|
|
||||||
email = ""
|
|
||||||
# If contact exists, update name and mail_from if needed
|
|
||||||
contact = Contact.get_by(alias_id=alias.id, website_email=email)
|
|
||||||
if contact is not None:
|
|
||||||
return __update_contact_if_needed(contact, name, mail_from)
|
|
||||||
# Create the contact
|
|
||||||
reply_email = generate_reply_email(email, alias)
|
|
||||||
alias_id = alias.id
|
|
||||||
try:
|
|
||||||
flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
|
|
||||||
is_invalid_email = email == ""
|
|
||||||
contact = Contact.create(
|
|
||||||
user_id=alias.user_id,
|
|
||||||
alias_id=alias.id,
|
|
||||||
website_email=email,
|
|
||||||
name=name,
|
|
||||||
reply_email=reply_email,
|
|
||||||
mail_from=mail_from,
|
|
||||||
automatic_created=automatic_created,
|
|
||||||
flags=flags,
|
|
||||||
invalid_email=is_invalid_email,
|
|
||||||
commit=True,
|
|
||||||
)
|
|
||||||
contact_id = contact.id
|
|
||||||
if automatic_created:
|
|
||||||
trail = ". Automatically created"
|
|
||||||
else:
|
|
||||||
trail = ". Created by user action"
|
|
||||||
emit_alias_audit_log(
|
|
||||||
alias=alias,
|
|
||||||
action=AliasAuditLogAction.CreateContact,
|
|
||||||
message=f"Created contact {contact_id} ({email}){trail}",
|
|
||||||
commit=True,
|
|
||||||
)
|
|
||||||
LOG.d(
|
|
||||||
f"Created contact {contact} for alias {alias} with email {email} invalid_email={is_invalid_email}"
|
|
||||||
)
|
|
||||||
return ContactCreateResult(contact, created=True, error=None)
|
|
||||||
except IntegrityError:
|
|
||||||
Session.rollback()
|
|
||||||
LOG.info(
|
|
||||||
f"Contact with email {email} for alias_id {alias_id} already existed, fetching from DB"
|
|
||||||
)
|
|
||||||
contact: Optional[Contact] = Contact.get_by(
|
|
||||||
alias_id=alias_id, website_email=email
|
|
||||||
)
|
|
||||||
if contact:
|
|
||||||
return __update_contact_if_needed(contact, name, mail_from)
|
|
||||||
else:
|
|
||||||
LOG.warning(
|
|
||||||
f"Could not find contact with email {email} for alias_id {alias_id} and it should exist"
|
|
||||||
)
|
|
||||||
return ContactCreateResult(
|
|
||||||
None, created=False, error=ContactCreateError.Unknown
|
|
||||||
)
|
|
@ -1,149 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
|
|
||||||
import arrow
|
|
||||||
from sqlalchemy import or_, update, and_
|
|
||||||
|
|
||||||
from app.config import ADMIN_EMAIL
|
|
||||||
from app.db import Session
|
|
||||||
from app.email_utils import send_email
|
|
||||||
from app.events.event_dispatcher import EventDispatcher
|
|
||||||
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
|
|
||||||
from app.log import LOG
|
|
||||||
from app.models import (
|
|
||||||
User,
|
|
||||||
ManualSubscription,
|
|
||||||
Coupon,
|
|
||||||
LifetimeCoupon,
|
|
||||||
PartnerSubscription,
|
|
||||||
PartnerUser,
|
|
||||||
)
|
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
|
|
||||||
|
|
||||||
class CouponUserCannotRedeemError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def redeem_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
|
|
||||||
if user.lifetime:
|
|
||||||
LOG.i(f"User {user} is a lifetime SL user. Cannot redeem coupons")
|
|
||||||
raise CouponUserCannotRedeemError()
|
|
||||||
|
|
||||||
sub = user.get_active_subscription()
|
|
||||||
if sub and not isinstance(sub, ManualSubscription):
|
|
||||||
LOG.i(
|
|
||||||
f"User {user} has an active subscription that is not manual. Cannot redeem coupon {coupon_code}"
|
|
||||||
)
|
|
||||||
raise CouponUserCannotRedeemError()
|
|
||||||
|
|
||||||
coupon = Coupon.get_by(code=coupon_code)
|
|
||||||
if not coupon:
|
|
||||||
LOG.i(f"User is trying to redeem coupon {coupon_code} that does not exist")
|
|
||||||
return None
|
|
||||||
|
|
||||||
now = arrow.utcnow()
|
|
||||||
stmt = (
|
|
||||||
update(Coupon)
|
|
||||||
.where(
|
|
||||||
and_(
|
|
||||||
Coupon.code == coupon_code,
|
|
||||||
Coupon.used == False, # noqa: E712
|
|
||||||
or_(
|
|
||||||
Coupon.expires_date == None, # noqa: E711
|
|
||||||
Coupon.expires_date > now,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.values(used=True, used_by_user_id=user.id, updated_at=now)
|
|
||||||
)
|
|
||||||
res = Session.execute(stmt)
|
|
||||||
if res.rowcount == 0:
|
|
||||||
LOG.i(f"Coupon {coupon.id} could not be redeemed. It's expired or invalid.")
|
|
||||||
return None
|
|
||||||
|
|
||||||
LOG.i(
|
|
||||||
f"Redeemed normal coupon {coupon.id} for {coupon.nb_year} years by user {user}"
|
|
||||||
)
|
|
||||||
if sub:
|
|
||||||
# renew existing subscription
|
|
||||||
if sub.end_at > arrow.now():
|
|
||||||
sub.end_at = sub.end_at.shift(years=coupon.nb_year)
|
|
||||||
else:
|
|
||||||
sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
|
|
||||||
else:
|
|
||||||
# There may be an expired manual subscription
|
|
||||||
sub = ManualSubscription.get_by(user_id=user.id)
|
|
||||||
end_at = arrow.now().shift(years=coupon.nb_year, days=1)
|
|
||||||
if sub:
|
|
||||||
sub.end_at = end_at
|
|
||||||
else:
|
|
||||||
sub = ManualSubscription.create(
|
|
||||||
user_id=user.id,
|
|
||||||
end_at=end_at,
|
|
||||||
comment="using coupon code",
|
|
||||||
is_giveaway=coupon.is_giveaway,
|
|
||||||
)
|
|
||||||
emit_user_audit_log(
|
|
||||||
user=user,
|
|
||||||
action=UserAuditLogAction.Upgrade,
|
|
||||||
message=f"User {user} redeemed coupon {coupon.id} for {coupon.nb_year} years",
|
|
||||||
)
|
|
||||||
EventDispatcher.send_event(
|
|
||||||
user=user,
|
|
||||||
content=EventContent(
|
|
||||||
user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
|
|
||||||
),
|
|
||||||
)
|
|
||||||
Session.commit()
|
|
||||||
return coupon
|
|
||||||
|
|
||||||
|
|
||||||
def redeem_lifetime_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
|
|
||||||
if user.lifetime:
|
|
||||||
return None
|
|
||||||
partner_sub = (
|
|
||||||
Session.query(PartnerSubscription)
|
|
||||||
.join(PartnerUser, PartnerUser.id == PartnerSubscription.partner_user_id)
|
|
||||||
.filter(PartnerUser.user_id == user.id, PartnerSubscription.lifetime == True) # noqa: E712
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
if partner_sub is not None:
|
|
||||||
return None
|
|
||||||
coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=coupon_code)
|
|
||||||
if not coupon:
|
|
||||||
return None
|
|
||||||
|
|
||||||
stmt = (
|
|
||||||
update(LifetimeCoupon)
|
|
||||||
.where(
|
|
||||||
and_(
|
|
||||||
LifetimeCoupon.code == coupon_code,
|
|
||||||
LifetimeCoupon.nb_used > 0,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.values(nb_used=LifetimeCoupon.nb_used - 1)
|
|
||||||
)
|
|
||||||
res = Session.execute(stmt)
|
|
||||||
if res.rowcount == 0:
|
|
||||||
LOG.i("Coupon could not be redeemed")
|
|
||||||
return None
|
|
||||||
|
|
||||||
user.lifetime = True
|
|
||||||
user.lifetime_coupon_id = coupon.id
|
|
||||||
if coupon.paid:
|
|
||||||
user.paid_lifetime = True
|
|
||||||
EventDispatcher.send_event(
|
|
||||||
user=user,
|
|
||||||
content=EventContent(user_plan_change=UserPlanChanged(lifetime=True)),
|
|
||||||
)
|
|
||||||
Session.commit()
|
|
||||||
|
|
||||||
# notify admin
|
|
||||||
send_email(
|
|
||||||
ADMIN_EMAIL,
|
|
||||||
subject=f"User {user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
|
|
||||||
plaintext="",
|
|
||||||
html="",
|
|
||||||
)
|
|
||||||
|
|
||||||
return coupon
|
|
@ -1,206 +0,0 @@
|
|||||||
import arrow
|
|
||||||
import re
|
|
||||||
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from app.constants import JobType
|
|
||||||
from app.db import Session
|
|
||||||
from app.email_utils import get_email_domain_part
|
|
||||||
from app.log import LOG
|
|
||||||
from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
|
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
|
|
||||||
_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
|
|
||||||
_MAX_MAILBOXES_PER_DOMAIN = 20
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class CreateCustomDomainResult:
|
|
||||||
message: str = ""
|
|
||||||
message_category: str = ""
|
|
||||||
success: bool = False
|
|
||||||
instance: Optional[CustomDomain] = None
|
|
||||||
redirect: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class CannotUseDomainReason(Enum):
|
|
||||||
InvalidDomain = 1
|
|
||||||
BuiltinDomain = 2
|
|
||||||
DomainAlreadyUsed = 3
|
|
||||||
DomainPartOfUserEmail = 4
|
|
||||||
DomainUserInMailbox = 5
|
|
||||||
|
|
||||||
def message(self, domain: str) -> str:
|
|
||||||
if self == CannotUseDomainReason.InvalidDomain:
|
|
||||||
return "This is not a valid domain"
|
|
||||||
elif self == CannotUseDomainReason.BuiltinDomain:
|
|
||||||
return "A custom domain cannot be a built-in domain."
|
|
||||||
elif self == CannotUseDomainReason.DomainAlreadyUsed:
|
|
||||||
return f"{domain} already used"
|
|
||||||
elif self == CannotUseDomainReason.DomainPartOfUserEmail:
|
|
||||||
return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
|
|
||||||
elif self == CannotUseDomainReason.DomainUserInMailbox:
|
|
||||||
return f"{domain} already used in a SimpleLogin mailbox"
|
|
||||||
else:
|
|
||||||
raise Exception("Invalid CannotUseDomainReason")
|
|
||||||
|
|
||||||
|
|
||||||
class CannotSetCustomDomainMailboxesCause(Enum):
|
|
||||||
InvalidMailbox = "Something went wrong, please retry"
|
|
||||||
NoMailboxes = "You must select at least 1 mailbox"
|
|
||||||
TooManyMailboxes = (
|
|
||||||
f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class SetCustomDomainMailboxesResult:
|
|
||||||
success: bool
|
|
||||||
reason: Optional[CannotSetCustomDomainMailboxesCause] = None
|
|
||||||
|
|
||||||
|
|
||||||
def is_valid_domain(domain: str) -> bool:
|
|
||||||
"""
|
|
||||||
Checks that a domain is valid according to RFC 1035
|
|
||||||
"""
|
|
||||||
if len(domain) > 255:
|
|
||||||
return False
|
|
||||||
if domain.endswith("."):
|
|
||||||
domain = domain[:-1] # Strip the trailing dot
|
|
||||||
labels = domain.split(".")
|
|
||||||
if not labels:
|
|
||||||
return False
|
|
||||||
for label in labels:
|
|
||||||
if not _ALLOWED_DOMAIN_REGEX.match(label):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def sanitize_domain(domain: str) -> str:
|
|
||||||
new_domain = domain.lower().strip()
|
|
||||||
if new_domain.startswith("http://"):
|
|
||||||
new_domain = new_domain[len("http://") :]
|
|
||||||
|
|
||||||
if new_domain.startswith("https://"):
|
|
||||||
new_domain = new_domain[len("https://") :]
|
|
||||||
|
|
||||||
return new_domain
|
|
||||||
|
|
||||||
|
|
||||||
def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
|
|
||||||
if not is_valid_domain(domain):
|
|
||||||
return CannotUseDomainReason.InvalidDomain
|
|
||||||
elif SLDomain.get_by(domain=domain):
|
|
||||||
return CannotUseDomainReason.BuiltinDomain
|
|
||||||
elif CustomDomain.get_by(domain=domain):
|
|
||||||
return CannotUseDomainReason.DomainAlreadyUsed
|
|
||||||
elif get_email_domain_part(user.email) == domain:
|
|
||||||
return CannotUseDomainReason.DomainPartOfUserEmail
|
|
||||||
elif Mailbox.filter(
|
|
||||||
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
|
|
||||||
).first():
|
|
||||||
return CannotUseDomainReason.DomainUserInMailbox
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def create_custom_domain(
|
|
||||||
user: User, domain: str, partner_id: Optional[int] = None
|
|
||||||
) -> CreateCustomDomainResult:
|
|
||||||
if not user.is_premium():
|
|
||||||
return CreateCustomDomainResult(
|
|
||||||
message="Only premium plan can add custom domain",
|
|
||||||
message_category="warning",
|
|
||||||
)
|
|
||||||
|
|
||||||
new_domain = sanitize_domain(domain)
|
|
||||||
domain_forbidden_cause = can_domain_be_used(user, new_domain)
|
|
||||||
if domain_forbidden_cause:
|
|
||||||
return CreateCustomDomainResult(
|
|
||||||
message=domain_forbidden_cause.message(new_domain), message_category="error"
|
|
||||||
)
|
|
||||||
|
|
||||||
new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)
|
|
||||||
|
|
||||||
# new domain has ownership verified if its parent has the ownership verified
|
|
||||||
for root_cd in user.custom_domains:
|
|
||||||
if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
|
|
||||||
LOG.i(
|
|
||||||
"%s ownership verified thanks to %s",
|
|
||||||
new_custom_domain,
|
|
||||||
root_cd,
|
|
||||||
)
|
|
||||||
new_custom_domain.ownership_verified = True
|
|
||||||
|
|
||||||
# Add the partner_id in case it's passed
|
|
||||||
if partner_id is not None:
|
|
||||||
new_custom_domain.partner_id = partner_id
|
|
||||||
|
|
||||||
emit_user_audit_log(
|
|
||||||
user=user,
|
|
||||||
action=UserAuditLogAction.CreateCustomDomain,
|
|
||||||
message=f"Created custom domain {new_custom_domain.id} ({new_domain})",
|
|
||||||
)
|
|
||||||
Session.commit()
|
|
||||||
|
|
||||||
return CreateCustomDomainResult(
|
|
||||||
success=True,
|
|
||||||
instance=new_custom_domain,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def delete_custom_domain(domain: CustomDomain):
|
|
||||||
# Schedule delete domain job
|
|
||||||
LOG.w("schedule delete domain job for %s", domain)
|
|
||||||
domain.pending_deletion = True
|
|
||||||
Job.create(
|
|
||||||
name=JobType.DELETE_DOMAIN.value,
|
|
||||||
payload={"custom_domain_id": domain.id},
|
|
||||||
run_at=arrow.now(),
|
|
||||||
commit=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def set_custom_domain_mailboxes(
|
|
||||||
user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
|
|
||||||
) -> SetCustomDomainMailboxesResult:
|
|
||||||
if len(mailbox_ids) == 0:
|
|
||||||
return SetCustomDomainMailboxesResult(
|
|
||||||
success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
|
|
||||||
)
|
|
||||||
elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
|
|
||||||
return SetCustomDomainMailboxesResult(
|
|
||||||
success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
|
|
||||||
)
|
|
||||||
|
|
||||||
mailboxes = (
|
|
||||||
Session.query(Mailbox)
|
|
||||||
.filter(
|
|
||||||
Mailbox.id.in_(mailbox_ids),
|
|
||||||
Mailbox.user_id == user_id,
|
|
||||||
Mailbox.verified == True, # noqa: E712
|
|
||||||
)
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
if len(mailboxes) != len(mailbox_ids):
|
|
||||||
return SetCustomDomainMailboxesResult(
|
|
||||||
success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
|
|
||||||
)
|
|
||||||
|
|
||||||
# first remove all existing domain-mailboxes links
|
|
||||||
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
|
|
||||||
Session.flush()
|
|
||||||
|
|
||||||
for mailbox in mailboxes:
|
|
||||||
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
|
|
||||||
|
|
||||||
mailboxes_as_str = ",".join(map(str, mailbox_ids))
|
|
||||||
emit_user_audit_log(
|
|
||||||
user=custom_domain.user,
|
|
||||||
action=UserAuditLogAction.UpdateCustomDomain,
|
|
||||||
message=f"Updated custom domain {custom_domain.id} mailboxes (domain={custom_domain.domain}) (mailboxes={mailboxes_as_str})",
|
|
||||||
)
|
|
||||||
Session.commit()
|
|
||||||
return SetCustomDomainMailboxesResult(success=True)
|
|
@@ -1,293 +1,37 @@
-from dataclasses import dataclass
-from typing import List, Optional

-from app import config
-from app.constants import DMARC_RECORD
from app.db import Session
-from app.dns_utils import (
+from app.dns_utils import get_cname_record
-DNSClient,
-get_network_dns_client,
-)
from app.models import CustomDomain
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-from app.utils import random_string


-@dataclass
-class DomainValidationResult:
-success: bool
-errors: [str]


-@dataclass
-class ExpectedValidationRecords:
-recommended: str
-allowed: list[str]


-def is_mx_equivalent(
-mx_domains: dict[int, list[str]],
-expected_mx_domains: dict[int, ExpectedValidationRecords],
-) -> bool:
-"""
-Compare mx_domains with ref_mx_domains to see if they are equivalent.
-mx_domains and ref_mx_domains are list of (priority, domain)

-The priority order is taken into account but not the priority number.
-For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
-"""

-expected_prios = []
-for prio in expected_mx_domains:
-expected_prios.append(prio)

-if len(expected_prios) != len(mx_domains):
-return False

-for prio_position, prio_value in enumerate(sorted(mx_domains.keys())):
-for domain in mx_domains[prio_value]:
-if domain not in expected_mx_domains[expected_prios[prio_position]].allowed:
-return False

-return True

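The is_mx_equivalent docstring above still speaks of (priority, domain) lists, but the signature now takes dict-shaped arguments; a small sketch of the intended behaviour under that assumption, with made-up hostnames.

# Illustrative sketch only -- made-up hostnames, dict shapes as in the signature above.
observed = {10: ["mx1.example-relay.com."], 20: ["mx2.example-relay.com."]}
expected = {
    1: ExpectedValidationRecords(
        recommended="mx1.example-relay.com.", allowed=["mx1.example-relay.com."]
    ),
    2: ExpectedValidationRecords(
        recommended="mx2.example-relay.com.", allowed=["mx2.example-relay.com."]
    ),
}
# Only the ordering of priorities matters, not their numeric values,
# so this comparison is expected to hold.
assert is_mx_equivalent(observed, expected)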
class CustomDomainValidation:
-def __init__(
+def __init__(self, dkim_domain: str):
-self,
-dkim_domain: str,
-dns_client: DNSClient = get_network_dns_client(),
-partner_domains: Optional[dict[int, str]] = None,
-partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
-):
self.dkim_domain = dkim_domain
-self._dns_client = dns_client
+self._dkim_records = {
-self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
+(f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
-self._partner_domain_validation_prefixes = (
+for key in ("dkim", "dkim02", "dkim03")
-partner_domains_validation_prefixes
-or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
-)

-def get_ownership_verification_record(
-self, domain: CustomDomain
-) -> ExpectedValidationRecords:
-prefixes = ["sl"]
-if (
-domain.partner_id is not None
-and domain.partner_id in self._partner_domain_validation_prefixes
-):
-prefixes.insert(
-0, self._partner_domain_validation_prefixes[domain.partner_id]
-)

-if not domain.ownership_txt_token:
-domain.ownership_txt_token = random_string(30)
-Session.commit()

-valid = [
-f"{prefix}-verification={domain.ownership_txt_token}" for prefix in prefixes
-]
-return ExpectedValidationRecords(recommended=valid[0], allowed=valid)

-def get_expected_mx_records(
-self, domain: CustomDomain
-) -> dict[int, ExpectedValidationRecords]:
-records = {}
-if domain.partner_id is not None and domain.partner_id in self._partner_domains:
-domain = self._partner_domains[domain.partner_id]
-records[10] = [f"mx1.{domain}."]
-records[20] = [f"mx2.{domain}."]
-# Default ones
-for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
-if priority not in records:
-records[priority] = []
-records[priority].append(domain)

-return {
-priority: ExpectedValidationRecords(
-recommended=records[priority][0], allowed=records[priority]
-)
-for priority in records
}

-def get_expected_spf_domain(
+def get_dkim_records(self) -> {str: str}:
-self, domain: CustomDomain
-) -> ExpectedValidationRecords:
-records = []
-if domain.partner_id is not None and domain.partner_id in self._partner_domains:
-records.append(self._partner_domains[domain.partner_id])
-else:
-records.append(config.EMAIL_DOMAIN)
-return ExpectedValidationRecords(recommended=records[0], allowed=records)

-def get_expected_spf_record(self, domain: CustomDomain) -> str:
-spf_domain = self.get_expected_spf_domain(domain)
-return f"v=spf1 include:{spf_domain.recommended} ~all"

-def get_dkim_records(
-self, domain: CustomDomain
-) -> {str: ExpectedValidationRecords}:
"""
-Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
+Get a list of dkim records to set up. It will be
-it will return the default ones or the partner ones.
"""
+return self._dkim_records
-# By default use the default domain
-dkim_domains = [self.dkim_domain]
-if domain.partner_id is not None:
-# Domain is from a partner. Retrieve the partner config and use that domain as preferred if it exists
-partner_domain = self._partner_domains.get(domain.partner_id, None)
-if partner_domain is not None:
-dkim_domains.insert(0, partner_domain)

-output = {}
-for key in ("dkim", "dkim02", "dkim03"):
-records = [
-f"{key}._domainkey.{dkim_domain}" for dkim_domain in dkim_domains
-]
-output[f"{key}._domainkey"] = ExpectedValidationRecords(
-recommended=records[0], allowed=records
-)

-return output

def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
"""
Check if dkim records are properly set for this custom domain.
Returns empty list if all records are ok. Other-wise return the records that aren't properly configured
"""
-correct_records = {}
invalid_records = {}
-expected_records = self.get_dkim_records(custom_domain)
+for prefix, expected_record in self.get_dkim_records():
-for prefix, expected_record in expected_records.items():
custom_record = f"{prefix}.{custom_domain.domain}"
-dkim_record = self._dns_client.get_cname_record(custom_record)
+dkim_record = get_cname_record(custom_record)
-if dkim_record in expected_record.allowed:
+if dkim_record != expected_record:
-correct_records[prefix] = custom_record
-else:
invalid_records[custom_record] = dkim_record or "empty"
+# HACK: If dkim is enabled, don't disable it to give users time to update their CNAMES
-# HACK
-# As initially we only had one dkim record, we want to allow users that had only the original dkim record and
-# the domain validated to continue seeing it as validated (although showing them the missing records).
-# However, if not even the original dkim record is right, even if the domain was dkim_verified in the past,
-# we will remove the dkim_verified flag.
-# This is done in order to give users with the old dkim config (only one) to update their CNAMEs
if custom_domain.dkim_verified:
-# Check if at least the original dkim is there
-if correct_records.get("dkim._domainkey") is not None:
-# Original dkim record is there. Return the missing records (if any) and don't clear the flag
return invalid_records

-# Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
-# rest of the code path, returning the invalid records and clearing the flag
custom_domain.dkim_verified = len(invalid_records) == 0
-if custom_domain.dkim_verified:
-emit_user_audit_log(
-user=custom_domain.user,
-action=UserAuditLogAction.VerifyCustomDomain,
-message=f"Verified DKIM records for custom domain {custom_domain.id} ({custom_domain.domain})",
-)
Session.commit()
return invalid_records

-def validate_domain_ownership(
-self, custom_domain: CustomDomain
-) -> DomainValidationResult:
-"""
-Check if the custom_domain has added the ownership verification records
-"""
-txt_records = self._dns_client.get_txt_record(custom_domain.domain)
-expected_verification_records = self.get_ownership_verification_record(
-custom_domain
-)
-found = False
-for verification_record in expected_verification_records.allowed:
-if verification_record in txt_records:
-found = True
-break
-if found:
-custom_domain.ownership_verified = True
-emit_user_audit_log(
-user=custom_domain.user,
-action=UserAuditLogAction.VerifyCustomDomain,
-message=f"Verified ownership for custom domain {custom_domain.id} ({custom_domain.domain})",
-)
-Session.commit()
-return DomainValidationResult(success=True, errors=[])
-else:
-return DomainValidationResult(success=False, errors=txt_records)

-def validate_mx_records(
-self, custom_domain: CustomDomain
-) -> DomainValidationResult:
-mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
-expected_mx_records = self.get_expected_mx_records(custom_domain)

-if not is_mx_equivalent(mx_domains, expected_mx_records):
-errors = []
-for prio in mx_domains:
-for mx_domain in mx_domains[prio]:
-errors.append(f"{prio} {mx_domain}")
-return DomainValidationResult(success=False, errors=errors)
-else:
-custom_domain.verified = True
-emit_user_audit_log(
-user=custom_domain.user,
-action=UserAuditLogAction.VerifyCustomDomain,
-message=f"Verified MX records for custom domain {custom_domain.id} ({custom_domain.domain})",
-)
-Session.commit()
-return DomainValidationResult(success=True, errors=[])

-def validate_spf_records(
-self, custom_domain: CustomDomain
-) -> DomainValidationResult:
-spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
-expected_spf_domain = self.get_expected_spf_domain(custom_domain)
-if len(set(expected_spf_domain.allowed).intersection(set(spf_domains))) > 0:
-custom_domain.spf_verified = True
-emit_user_audit_log(
-user=custom_domain.user,
-action=UserAuditLogAction.VerifyCustomDomain,
-message=f"Verified SPF records for custom domain {custom_domain.id} ({custom_domain.domain})",
-)
-Session.commit()
-return DomainValidationResult(success=True, errors=[])
-else:
-custom_domain.spf_verified = False
-Session.commit()
-txt_records = self._dns_client.get_txt_record(custom_domain.domain)
-cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
-return DomainValidationResult(
-success=False,
-errors=cleaned_records,
-)

-def validate_dmarc_records(
-self, custom_domain: CustomDomain
-) -> DomainValidationResult:
-txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
-if DMARC_RECORD in txt_records:
-custom_domain.dmarc_verified = True
-emit_user_audit_log(
-user=custom_domain.user,
-action=UserAuditLogAction.VerifyCustomDomain,
-message=f"Verified DMARC records for custom domain {custom_domain.id} ({custom_domain.domain})",
-)
-Session.commit()
-return DomainValidationResult(success=True, errors=[])
-else:
-custom_domain.dmarc_verified = False
-Session.commit()
-return DomainValidationResult(success=False, errors=txt_records)

-def __clean_spf_records(
-self, txt_records: List[str], custom_domain: CustomDomain
-) -> List[str]:
-final_records = []
-verification_records = self.get_ownership_verification_record(custom_domain)
-for record in txt_records:
-if record not in verification_records.allowed:
-final_records.append(record)
-return final_records
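On the main side, get_dkim_records keys the expected CNAME targets by record prefix and falls back to the default dkim_domain when the domain has no partner entry. A rough sketch of the returned shape, using example.org as a stand-in for the configured EMAIL_DOMAIN and a hypothetical non-partner custom_domain.

# Illustrative sketch only -- example.org stands in for the real dkim_domain,
# and custom_domain is a hypothetical non-partner CustomDomain.
validator = CustomDomainValidation(dkim_domain="example.org")
records = validator.get_dkim_records(custom_domain)
# Expected shape:
# {
#     "dkim._domainkey": ExpectedValidationRecords(
#         recommended="dkim._domainkey.example.org",
#         allowed=["dkim._domainkey.example.org"],
#     ),
#     "dkim02._domainkey": ...,
#     "dkim03._domainkey": ...,
# }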
@@ -1,5 +1,3 @@
-import secrets

import arrow
from flask import (
render_template,
@@ -39,7 +37,7 @@ from app.models import (
SenderFormatEnum,
UnsubscribeBehaviourEnum,
)
-from app.proton.proton_unlink import perform_proton_account_unlink
+from app.proton.utils import perform_proton_account_unlink
from app.utils import (
random_string,
CSRFValidationForm,
@@ -165,7 +163,7 @@ def send_reset_password_email(user):
"""
# the activation code is valid for 1h
reset_password_code = ResetPasswordCode.create(
-user_id=user.id, code=secrets.token_urlsafe(32)
+user_id=user.id, code=random_string(60)
)
Session.commit()

@@ -239,8 +237,6 @@ def unlink_proton_account():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))

-if not perform_proton_account_unlink(current_user):
+perform_proton_account_unlink(current_user)
-flash("Account cannot be unlinked", "warning")
-else:
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))
@@ -1,6 +1,5 @@
from dataclasses import dataclass
from operator import or_
-from typing import Optional

from flask import render_template, request, redirect, flash
from flask import url_for
@@ -10,11 +9,13 @@ from sqlalchemy import and_, func, case
from wtforms import StringField, validators, ValidationError

# Need to import directly from config to allow modification from the tests
-from app import config, parallel_limiter, contact_utils
+from app import config, parallel_limiter
-from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
-from app.contact_utils import ContactCreateError
from app.dashboard.base import dashboard_bp
from app.db import Session
+from app.email_utils import (
+generate_reply_email,
+parse_full_address,
+)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
@@ -23,8 +24,8 @@ from app.errors import (
ErrContactAlreadyExists,
)
from app.log import LOG
-from app.models import Alias, Contact, EmailLog
+from app.models import Alias, Contact, EmailLog, User
-from app.utils import CSRFValidationForm
+from app.utils import sanitize_email, CSRFValidationForm


def email_validator():
@@ -50,7 +51,7 @@ def email_validator():
return _check


-def create_contact(alias: Alias, contact_address: str) -> Contact:
+def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
"""
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
Can throw exceptions:
@@ -60,23 +61,37 @@ def create_contact(alias: Alias, contact_address: str) -> Contact:
"""
if not contact_address:
raise ErrAddressInvalid("Empty address")
-output = contact_utils.create_contact(email=contact_address, alias=alias)
+try:
-if output.error == ContactCreateError.InvalidEmail:
+contact_name, contact_email = parse_full_address(contact_address)
+except ValueError:
raise ErrAddressInvalid(contact_address)
-elif output.error == ContactCreateError.NotAllowed:
-raise ErrContactErrorUpgradeNeeded()
-elif output.error is not None:
-raise ErrAddressInvalid("Invalid address")
-elif not output.created:
-raise ErrContactAlreadyExists(output.contact)

-contact = output.contact
+contact_email = sanitize_email(contact_email)
+if not is_valid_email(contact_email):
+raise ErrAddressInvalid(contact_email)
+
+contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
+if contact:
+raise ErrContactAlreadyExists(contact)
+
+if not user.can_create_contacts():
+raise ErrContactErrorUpgradeNeeded()
+
+contact = Contact.create(
+user_id=alias.user_id,
+alias_id=alias.id,
+website_email=contact_email,
+name=contact_name,
+reply_email=generate_reply_email(contact_email, alias),
+)

LOG.d(
"create reverse-alias for %s %s, reverse alias:%s",
contact_address,
alias,
contact.reply_email,
)
+Session.commit()

return contact

@@ -192,7 +207,7 @@ def get_contact_infos(


def delete_contact(alias: Alias, contact_id: int):
-contact: Optional[Contact] = Contact.get(contact_id)
+contact = Contact.get(contact_id)

if not contact:
flash("Unknown error. Refresh the page", "warning")
@@ -200,11 +215,6 @@ def delete_contact(alias: Alias, contact_id: int):
flash("You cannot delete reverse-alias", "warning")
else:
delete_contact_email = contact.website_email
-emit_alias_audit_log(
-alias=alias,
-action=AliasAuditLogAction.DeleteContact,
-message=f"Delete contact {contact_id} ({contact.email})",
-)
Contact.delete(contact_id)
Session.commit()

@@ -227,10 +237,7 @@ def alias_contact_manager(alias_id):

page = 0
if request.args.get("page"):
-try:
page = int(request.args.get("page"))
-except ValueError:
-pass

query = request.args.get("query") or ""

@@ -254,7 +261,7 @@ def alias_contact_manager(alias_id):
if new_contact_form.validate():
contact_address = new_contact_form.email.data.strip()
try:
-contact = create_contact(alias, contact_address)
+contact = create_contact(current_user, alias, contact_address)
except (
ErrContactErrorUpgradeNeeded,
ErrAddressInvalid,
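create_contact raises the same exception types on both sides of the diff; a compact sketch of how a caller such as the view above might wrap the main-side signature (the flash messages are invented).

# Illustrative sketch only -- mirrors the exception handling used by the view
# above; the messages are invented.
def try_create_contact(alias, contact_address):
    try:
        return create_contact(alias, contact_address)
    except ErrContactErrorUpgradeNeeded:
        flash("Please upgrade to create more contacts", "warning")
    except (ErrAddressInvalid, CannotCreateContactForReverseAlias):
        flash(f"{contact_address} is not a valid address", "error")
    except ErrContactAlreadyExists:
        flash("This contact already exists", "warning")
    return None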
@@ -7,7 +7,6 @@ from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user

from app import config
-from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_utils import transfer_alias
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
@@ -58,12 +57,6 @@ def alias_transfer_send_route(alias_id):
transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
alias.transfer_token = hmac_alias_transfer_token(transfer_token)
alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)

-emit_alias_audit_log(
-alias,
-AliasAuditLogAction.InitiateTransferAlias,
-"Initiated alias transfer",
-)
Session.commit()
alias_transfer_url = (
config.URL
@@ -3,7 +3,7 @@ from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required, current_user

from app import s3
-from app.constants import JobType
+from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
@@ -64,7 +64,7 @@ def batch_import_route():

# Schedule batch import job
Job.create(
-name=JobType.BATCH_IMPORT.value,
+name=JOB_BATCH_IMPORT,
payload={"batch_import_id": bi.id},
run_at=arrow.now(),
)
@@ -1,11 +1,8 @@
-from typing import Optional

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

-from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Contact
@@ -23,7 +20,7 @@ class PGPContactForm(FlaskForm):
@dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
@login_required
def contact_detail_route(contact_id):
-contact: Optional[Contact] = Contact.get(contact_id)
+contact = Contact.get(contact_id)
if not contact or contact.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
@@ -53,11 +50,6 @@ def contact_detail_route(contact_id):
except PGPException:
flash("Cannot add the public key, please verify it", "error")
else:
-emit_alias_audit_log(
-alias=alias,
-action=AliasAuditLogAction.UpdateContact,
-message=f"Added PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
-)
Session.commit()
flash(
f"PGP public key for {contact.email} is saved successfully",
@@ -70,11 +62,6 @@ def contact_detail_route(contact_id):
)
elif pgp_form.action.data == "remove":
# Free user can decide to remove contact PGP key
-emit_alias_audit_log(
-alias=alias,
-action=AliasAuditLogAction.UpdateContact,
-message=f"Removed PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
-)
contact.pgp_public_key = None
contact.pgp_finger_print = None
Session.commit()
@@ -1,15 +1,17 @@
import arrow
-from flask import render_template, flash, redirect, url_for
+from flask import render_template, flash, redirect, url_for, request
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import parallel_limiter
from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
-from app.coupon_utils import redeem_coupon, CouponUserCannotRedeemError
from app.dashboard.base import dashboard_bp
+from app.db import Session
from app.log import LOG
from app.models import (
+ManualSubscription,
+Coupon,
Subscription,
AppleSubscription,
CoinbaseSubscription,
@@ -56,23 +58,56 @@ def coupon_route():

if coupon_form.validate_on_submit():
code = coupon_form.code.data
-try:
-coupon = redeem_coupon(code, current_user)
+coupon: Coupon = Coupon.get_by(code=code)
-if coupon:
+if coupon and not coupon.used:
+if coupon.expires_date and coupon.expires_date < arrow.now():
+flash(
+f"The coupon was expired on {coupon.expires_date.humanize()}",
+"error",
+)
+return redirect(request.url)
+
+updated = (
+Session.query(Coupon)
+.filter_by(code=code, used=False)
+.update({"used_by_user_id": current_user.id, "used": True})
+)
+if updated != 1:
+flash("Coupon is not valid", "error")
+return redirect(request.url)
+
+manual_sub: ManualSubscription = ManualSubscription.get_by(
+user_id=current_user.id
+)
+if manual_sub:
+# renew existing subscription
+if manual_sub.end_at > arrow.now():
+manual_sub.end_at = manual_sub.end_at.shift(years=coupon.nb_year)
+else:
+manual_sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
+Session.commit()
+flash(
+f"Your current subscription is extended to {manual_sub.end_at.humanize()}",
+"success",
+)
+else:
+ManualSubscription.create(
+user_id=current_user.id,
+end_at=arrow.now().shift(years=coupon.nb_year, days=1),
+comment="using coupon code",
+is_giveaway=coupon.is_giveaway,
+commit=True,
+)
flash(
"Your account has been upgraded to Premium, thanks for your support!",
"success",
)

+return redirect(url_for("dashboard.index"))

else:
-flash(
+flash(f"Code *{code}* expired or invalid", "warning")
-"This coupon cannot be redeemed. It's invalid or has expired",
-"warning",
-)
-except CouponUserCannotRedeemError:
-flash(
-"You have an active subscription. Please remove it before redeeming a coupon",
-"warning",
-)

return render_template(
"dashboard/coupon.html",
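On the 4.49.8 side a redeemed coupon either extends an existing manual subscription or creates a new one; a tiny sketch of the arrow date arithmetic involved, with invented values.

# Illustrative sketch only -- invented values for coupon.nb_year and the current end date.
import arrow

nb_year = 1
now = arrow.now()
current_end = now.shift(months=3)  # pretend the manual subscription ends in 3 months

if current_end > now:
    new_end = current_end.shift(years=nb_year)  # still active: extend from the end date
else:
    new_end = now.shift(years=nb_year, days=1)  # lapsed: restart from today plus a grace day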
@@ -5,9 +5,11 @@ from wtforms import StringField, validators

from app import parallel_limiter
from app.config import EMAIL_SERVERS_WITH_PRIORITY
-from app.custom_domain_utils import create_custom_domain
from app.dashboard.base import dashboard_bp
-from app.models import CustomDomain
+from app.db import Session
+from app.email_utils import get_email_domain_part
+from app.log import LOG
+from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain


class NewCustomDomainForm(FlaskForm):
@@ -21,12 +23,13 @@ class NewCustomDomainForm(FlaskForm):
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
def custom_domain():
custom_domains = CustomDomain.filter_by(
-user_id=current_user.id,
+user_id=current_user.id, is_sl_subdomain=False
-is_sl_subdomain=False,
-pending_deletion=False,
).all()
+mailboxes = current_user.mailboxes()
new_custom_domain_form = NewCustomDomainForm()

+errors = {}

if request.method == "POST":
if request.form.get("form-name") == "create":
if not current_user.is_premium():
@@ -34,25 +37,87 @@ def custom_domain():
return redirect(url_for("dashboard.custom_domain"))

if new_custom_domain_form.validate():
-res = create_custom_domain(
+new_domain = new_custom_domain_form.domain.data.lower().strip()
-user=current_user, domain=new_custom_domain_form.domain.data
+if new_domain.startswith("http://"):
+new_domain = new_domain[len("http://") :]

+if new_domain.startswith("https://"):
+new_domain = new_domain[len("https://") :]

+if SLDomain.get_by(domain=new_domain):
+flash("A custom domain cannot be a built-in domain.", "error")
+elif CustomDomain.get_by(domain=new_domain):
+flash(f"{new_domain} already used", "error")
+elif get_email_domain_part(current_user.email) == new_domain:
+flash(
+"You cannot add a domain that you are currently using for your personal email. "
+"Please change your personal email to your real email",
+"error",
)
-if res.success:
+elif Mailbox.filter(
-flash(f"New domain {res.instance.domain} is created", "success")
+Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
+).first():
+flash(
+f"{new_domain} already used in a SimpleLogin mailbox", "error"
+)
+else:
+new_custom_domain = CustomDomain.create(
+domain=new_domain, user_id=current_user.id
+)
+# new domain has ownership verified if its parent has the ownership verified
+for root_cd in current_user.custom_domains:
+if (
+new_domain.endswith("." + root_cd.domain)
+and root_cd.ownership_verified
+):
+LOG.i(
+"%s ownership verified thanks to %s",
+new_custom_domain,
+root_cd,
+)
+new_custom_domain.ownership_verified = True

+Session.commit()

+mailbox_ids = request.form.getlist("mailbox_ids")
+if mailbox_ids:
+# check if mailbox is not tempered with
+mailboxes = []
+for mailbox_id in mailbox_ids:
+mailbox = Mailbox.get(mailbox_id)
+if (
+not mailbox
+or mailbox.user_id != current_user.id
+or not mailbox.verified
+):
+flash("Something went wrong, please retry", "warning")
+return redirect(url_for("dashboard.custom_domain"))
+mailboxes.append(mailbox)

+for mailbox in mailboxes:
+DomainMailbox.create(
+domain_id=new_custom_domain.id, mailbox_id=mailbox.id
+)

+Session.commit()

+flash(
+f"New domain {new_custom_domain.domain} is created", "success"
+)

return redirect(
url_for(
"dashboard.domain_detail_dns",
-custom_domain_id=res.instance.id,
+custom_domain_id=new_custom_domain.id,
)
)
-else:
-flash(res.message, res.message_category)
-if res.redirect:
-return redirect(url_for(res.redirect))

return render_template(
"dashboard/custom_domain.html",
custom_domains=custom_domains,
new_custom_domain_form=new_custom_domain_form,
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
+errors=errors,
+mailboxes=mailboxes,
)
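The 4.49.8 branch marks a freshly added sub-domain as ownership-verified when one of the user's existing custom domains is a verified parent of it; a stripped-down sketch of that suffix rule with toy domain names.

# Illustrative sketch only -- toy domain names, standalone helper.
def inherits_ownership(new_domain, verified_parent_domains):
    return any(new_domain.endswith("." + parent) for parent in verified_parent_domains)


assert inherits_ownership("mail.example.com", ["example.com"]) is True
assert inherits_ownership("mailexample.com", ["example.com"]) is False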
@@ -3,12 +3,11 @@ from flask import flash, redirect, url_for, request, render_template
from flask_login import login_required, current_user
from flask_wtf import FlaskForm

-from app.constants import JobType
+from app.config import JOB_DELETE_ACCOUNT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.log import LOG
from app.models import Subscription, Job
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class DeleteDirForm(FlaskForm):
@@ -34,13 +33,8 @@ def delete_account():

# Schedule delete account job
LOG.w("schedule delete account job for %s", current_user)
-emit_user_audit_log(
-user=current_user,
-action=UserAuditLogAction.UserMarkedForDeletion,
-message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
-)
Job.create(
-name=JobType.DELETE_ACCOUNT.value,
+name=JOB_DELETE_ACCOUNT,
payload={"user_id": current_user.id},
run_at=arrow.now(),
commit=True,
@@ -1,5 +1,3 @@
-from typing import Optional

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
@@ -22,7 +20,6 @@ from app.dashboard.base import dashboard_bp
from app.db import Session
from app.errors import DirectoryInTrashError
from app.models import Directory, Mailbox, DirectoryMailbox
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class NewDirForm(FlaskForm):
@@ -72,9 +69,7 @@ def directory():
if not delete_dir_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
-dir_obj: Optional[Directory] = Directory.get(
+dir_obj = Directory.get(delete_dir_form.directory_id.data)
-delete_dir_form.directory_id.data
-)

if not dir_obj:
flash("Unknown error. Refresh the page", "warning")
@@ -84,11 +79,6 @@ def directory():
return redirect(url_for("dashboard.directory"))

name = dir_obj.name
-emit_user_audit_log(
-user=current_user,
-action=UserAuditLogAction.DeleteDirectory,
-message=f"Delete directory {dir_obj.id} ({dir_obj.name})",
-)
Directory.delete(dir_obj.id)
Session.commit()
flash(f"Directory {name} has been deleted", "success")
@@ -100,7 +90,7 @@ def directory():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = toggle_dir_form.directory_id.data
-dir_obj: Optional[Directory] = Directory.get(dir_id)
+dir_obj = Directory.get(dir_id)

if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
@@ -113,11 +103,6 @@ def directory():
dir_obj.disabled = True
flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")

-emit_user_audit_log(
-user=current_user,
-action=UserAuditLogAction.UpdateDirectory,
-message=f"Updated directory {dir_obj.id} ({dir_obj.name}) set disabled = {dir_obj.disabled}",
-)
Session.commit()

return redirect(url_for("dashboard.directory"))
@@ -127,7 +112,7 @@ def directory():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = update_dir_form.directory_id.data
-dir_obj: Optional[Directory] = Directory.get(dir_id)
+dir_obj = Directory.get(dir_id)

if not dir_obj or dir_obj.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
@@ -158,12 +143,6 @@ def directory():
for mailbox in mailboxes:
DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)

-mailboxes_as_str = ",".join(map(str, mailbox_ids))
-emit_user_audit_log(
-user=current_user,
-action=UserAuditLogAction.UpdateDirectory,
-message=f"Updated directory {dir_obj.id} ({dir_obj.name}) mailboxes ({mailboxes_as_str})",
-)
Session.commit()
flash(f"Directory {dir_obj.name} has been updated", "success")

@@ -202,11 +181,6 @@ def directory():
new_dir = Directory.create(
name=new_dir_name, user_id=current_user.id
)
-emit_user_audit_log(
-user=current_user,
-action=UserAuditLogAction.CreateDirectory,
-message=f"New directory {new_dir.name} ({new_dir.name})",
-)
except DirectoryInTrashError:
flash(
f"{new_dir_name} has been used before and cannot be reused",
@ -1,26 +1,33 @@
|
|||||||
import re
|
import re
|
||||||
|
|
||||||
|
import arrow
|
||||||
from flask import render_template, request, redirect, url_for, flash
|
from flask import render_template, request, redirect, url_for, flash
|
||||||
from flask_login import login_required, current_user
|
from flask_login import login_required, current_user
|
||||||
from flask_wtf import FlaskForm
|
from flask_wtf import FlaskForm
|
||||||
from wtforms import StringField, validators, IntegerField
|
from wtforms import StringField, validators, IntegerField
|
||||||
|
|
||||||
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
|
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
|
||||||
from app.constants import DMARC_RECORD
|
|
||||||
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
|
|
||||||
from app.custom_domain_validation import CustomDomainValidation
|
from app.custom_domain_validation import CustomDomainValidation
|
||||||
from app.dashboard.base import dashboard_bp
|
from app.dashboard.base import dashboard_bp
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
|
from app.dns_utils import (
|
||||||
|
get_mx_domains,
|
||||||
|
get_spf_domain,
|
||||||
|
get_txt_record,
|
||||||
|
is_mx_equivalent,
|
||||||
|
)
|
||||||
|
from app.log import LOG
|
||||||
from app.models import (
|
from app.models import (
|
||||||
CustomDomain,
|
CustomDomain,
|
||||||
Alias,
|
Alias,
|
||||||
DomainDeletedAlias,
|
DomainDeletedAlias,
|
||||||
Mailbox,
|
Mailbox,
|
||||||
|
DomainMailbox,
|
||||||
AutoCreateRule,
|
AutoCreateRule,
|
||||||
AutoCreateRuleMailbox,
|
AutoCreateRuleMailbox,
|
||||||
|
Job,
|
||||||
)
|
)
|
||||||
from app.regex_utils import regex_match
|
from app.regex_utils import regex_match
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
from app.utils import random_string, CSRFValidationForm
|
from app.utils import random_string, CSRFValidationForm
|
||||||
|
|
||||||
|
|
||||||
@ -37,9 +44,13 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
custom_domain.ownership_txt_token = random_string(30)
|
custom_domain.ownership_txt_token = random_string(30)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
|
spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"
|
||||||
|
|
||||||
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
|
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
|
||||||
csrf_form = CSRFValidationForm()
|
csrf_form = CSRFValidationForm()
|
||||||
|
|
||||||
|
dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
|
||||||
|
|
||||||
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
|
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
|
||||||
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
|
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
|
||||||
|
|
||||||
@ -48,14 +59,15 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
flash("Invalid request", "warning")
|
flash("Invalid request", "warning")
|
||||||
return redirect(request.url)
|
return redirect(request.url)
|
||||||
if request.form.get("form-name") == "check-ownership":
|
if request.form.get("form-name") == "check-ownership":
|
||||||
ownership_validation_result = domain_validator.validate_domain_ownership(
|
txt_records = get_txt_record(custom_domain.domain)
|
||||||
custom_domain
|
|
||||||
)
|
if custom_domain.get_ownership_dns_txt_value() in txt_records:
|
||||||
if ownership_validation_result.success:
|
|
||||||
flash(
|
flash(
|
||||||
"Domain ownership is verified. Please proceed to the other records setup",
|
"Domain ownership is verified. Please proceed to the other records setup",
|
||||||
"success",
|
"success",
|
||||||
)
|
)
|
||||||
|
custom_domain.ownership_verified = True
|
||||||
|
Session.commit()
|
||||||
return redirect(
|
return redirect(
|
||||||
url_for(
|
url_for(
|
||||||
"dashboard.domain_detail_dns",
|
"dashboard.domain_detail_dns",
|
||||||
@ -66,28 +78,36 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
else:
|
else:
|
||||||
flash("We can't find the needed TXT record", "error")
|
flash("We can't find the needed TXT record", "error")
|
||||||
ownership_ok = False
|
ownership_ok = False
|
||||||
ownership_errors = ownership_validation_result.errors
|
ownership_errors = txt_records
|
||||||
|
|
||||||
elif request.form.get("form-name") == "check-mx":
|
elif request.form.get("form-name") == "check-mx":
|
||||||
mx_validation_result = domain_validator.validate_mx_records(custom_domain)
|
mx_domains = get_mx_domains(custom_domain.domain)
|
||||||
if mx_validation_result.success:
|
|
||||||
|
if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
|
||||||
|
flash("The MX record is not correctly set", "warning")
|
||||||
|
|
||||||
|
mx_ok = False
|
||||||
|
# build mx_errors to show to user
|
||||||
|
mx_errors = [
|
||||||
|
f"{priority} {domain}" for (priority, domain) in mx_domains
|
||||||
|
]
|
||||||
|
else:
|
||||||
flash(
|
flash(
|
||||||
"Your domain can start receiving emails. You can now use it to create alias",
|
"Your domain can start receiving emails. You can now use it to create alias",
|
||||||
"success",
|
"success",
|
||||||
)
|
)
|
||||||
|
custom_domain.verified = True
|
||||||
|
Session.commit()
|
||||||
return redirect(
|
return redirect(
|
||||||
url_for(
|
url_for(
|
||||||
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
|
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
flash("The MX record is not correctly set", "warning")
|
|
||||||
mx_ok = False
|
|
||||||
mx_errors = mx_validation_result.errors
|
|
||||||
|
|
||||||
elif request.form.get("form-name") == "check-spf":
|
elif request.form.get("form-name") == "check-spf":
|
||||||
spf_validation_result = domain_validator.validate_spf_records(custom_domain)
|
spf_domains = get_spf_domain(custom_domain.domain)
|
||||||
if spf_validation_result.success:
|
if EMAIL_DOMAIN in spf_domains:
|
||||||
|
custom_domain.spf_verified = True
|
||||||
|
Session.commit()
|
||||||
flash("SPF is setup correctly", "success")
|
flash("SPF is setup correctly", "success")
|
||||||
return redirect(
|
return redirect(
|
||||||
url_for(
|
url_for(
|
||||||
@ -95,12 +115,14 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
custom_domain.spf_verified = False
|
||||||
|
Session.commit()
|
||||||
flash(
|
flash(
|
||||||
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
|
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
|
||||||
"warning",
|
"warning",
|
||||||
)
|
)
|
||||||
spf_ok = False
|
spf_ok = False
|
||||||
spf_errors = spf_validation_result.errors
|
spf_errors = get_txt_record(custom_domain.domain)
|
||||||
|
|
||||||
elif request.form.get("form-name") == "check-dkim":
|
elif request.form.get("form-name") == "check-dkim":
|
||||||
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
|
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
|
||||||
@ -116,10 +138,10 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
flash("DKIM: the CNAME record is not correctly set", "warning")
|
flash("DKIM: the CNAME record is not correctly set", "warning")
|
||||||
|
|
||||||
elif request.form.get("form-name") == "check-dmarc":
|
elif request.form.get("form-name") == "check-dmarc":
|
||||||
dmarc_validation_result = domain_validator.validate_dmarc_records(
|
txt_records = get_txt_record("_dmarc." + custom_domain.domain)
|
||||||
custom_domain
|
if dmarc_record in txt_records:
|
||||||
)
|
custom_domain.dmarc_verified = True
|
||||||
if dmarc_validation_result.success:
|
Session.commit()
|
||||||
flash("DMARC is setup correctly", "success")
|
flash("DMARC is setup correctly", "success")
|
||||||
return redirect(
|
return redirect(
|
||||||
url_for(
|
url_for(
|
||||||
@ -127,23 +149,19 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
custom_domain.dmarc_verified = False
|
||||||
|
Session.commit()
|
||||||
flash(
|
flash(
|
||||||
"DMARC: The TXT record is not correctly set",
|
"DMARC: The TXT record is not correctly set",
|
||||||
"warning",
|
"warning",
|
||||||
)
|
)
|
||||||
dmarc_ok = False
|
dmarc_ok = False
|
||||||
dmarc_errors = dmarc_validation_result.errors
|
dmarc_errors = txt_records
|
||||||
|
|
||||||
return render_template(
|
return render_template(
|
||||||
"dashboard/domain_detail/dns.html",
|
"dashboard/domain_detail/dns.html",
|
||||||
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
|
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
|
||||||
ownership_records=domain_validator.get_ownership_verification_record(
|
dkim_records=domain_validator.get_dkim_records(),
|
||||||
custom_domain
|
|
||||||
),
|
|
||||||
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
|
|
||||||
-            dkim_records=domain_validator.get_dkim_records(custom_domain),
-            spf_record=domain_validator.get_expected_spf_record(custom_domain),
-            dmarc_record=DMARC_RECORD,
             **locals(),
         )
 
@@ -165,11 +183,6 @@ def domain_detail(custom_domain_id):
             return redirect(request.url)
         if request.form.get("form-name") == "switch-catch-all":
             custom_domain.catch_all = not custom_domain.catch_all
-            emit_user_audit_log(
-                user=current_user,
-                action=UserAuditLogAction.UpdateCustomDomain,
-                message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) catch all to {custom_domain.catch_all}",
-            )
             Session.commit()
 
             if custom_domain.catch_all:
@@ -188,11 +201,6 @@ def domain_detail(custom_domain_id):
         elif request.form.get("form-name") == "set-name":
             if request.form.get("action") == "save":
                 custom_domain.name = request.form.get("alias-name").replace("\n", "")
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateCustomDomain,
-                    message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) name",
-                )
                 Session.commit()
                 flash(
                     f"Default alias name for Domain {custom_domain.domain} has been set",
@@ -200,11 +208,6 @@ def domain_detail(custom_domain_id):
                 )
             else:
                 custom_domain.name = None
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateCustomDomain,
-                    message=f"Cleared custom domain {custom_domain.id} ({custom_domain.domain}) name",
-                )
                 Session.commit()
                 flash(
                     f"Default alias name for Domain {custom_domain.domain} has been removed",
@@ -218,11 +221,6 @@ def domain_detail(custom_domain_id):
             custom_domain.random_prefix_generation = (
                 not custom_domain.random_prefix_generation
             )
-            emit_user_audit_log(
-                user=current_user,
-                action=UserAuditLogAction.UpdateCustomDomain,
-                message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) random prefix generation to {custom_domain.random_prefix_generation}",
-            )
             Session.commit()
 
             if custom_domain.random_prefix_generation:
@@ -240,16 +238,40 @@ def domain_detail(custom_domain_id):
             )
         elif request.form.get("form-name") == "update":
             mailbox_ids = request.form.getlist("mailbox_ids")
-            result = set_custom_domain_mailboxes(
-                user_id=current_user.id,
-                custom_domain=custom_domain,
-                mailbox_ids=mailbox_ids,
+            # check if mailbox is not tempered with
+            mailboxes = []
+            for mailbox_id in mailbox_ids:
+                mailbox = Mailbox.get(mailbox_id)
+                if (
+                    not mailbox
+                    or mailbox.user_id != current_user.id
+                    or not mailbox.verified
+                ):
+                    flash("Something went wrong, please retry", "warning")
+                    return redirect(
+                        url_for(
+                            "dashboard.domain_detail", custom_domain_id=custom_domain.id
+                        )
+                    )
+                mailboxes.append(mailbox)
+
+            if not mailboxes:
+                flash("You must select at least 1 mailbox", "warning")
+                return redirect(
+                    url_for(
+                        "dashboard.domain_detail", custom_domain_id=custom_domain.id
+                    )
             )
 
-            if result.success:
-                flash(f"{custom_domain.domain} mailboxes has been updated", "success")
-            else:
-                flash(result.reason.value, "warning")
+            # first remove all existing domain-mailboxes links
+            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
+            Session.flush()
+
+            for mailbox in mailboxes:
+                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
+
+            Session.commit()
+            flash(f"{custom_domain.domain} mailboxes has been updated", "success")
 
             return redirect(
                 url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
@@ -257,8 +279,16 @@ def domain_detail(custom_domain_id):
 
         elif request.form.get("form-name") == "delete":
             name = custom_domain.domain
+            LOG.d("Schedule deleting %s", custom_domain)
 
-            delete_custom_domain(custom_domain)
+            # Schedule delete domain job
+            LOG.w("schedule delete domain job for %s", custom_domain)
+            Job.create(
+                name=JOB_DELETE_DOMAIN,
+                payload={"custom_domain_id": custom_domain.id},
+                run_at=arrow.now(),
+                commit=True,
+            )
 
             flash(
                 f"{name} scheduled for deletion."
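Note: the 4.49.8 side above validates the submitted mailbox ids itself before linking them to the custom domain, while main delegates to set_custom_domain_mailboxes. Below is a small standalone sketch of that ownership/verification check, using a made-up stand-in record instead of the app's Mailbox model; names and data are illustrative only.

    from dataclasses import dataclass
    from typing import Dict, List, Optional


    @dataclass
    class MailboxRecord:
        id: int
        user_id: int
        verified: bool


    MAILBOXES: Dict[int, MailboxRecord] = {
        1: MailboxRecord(id=1, user_id=7, verified=True),
        2: MailboxRecord(id=2, user_id=9, verified=True),   # owned by someone else
        3: MailboxRecord(id=3, user_id=7, verified=False),  # not yet verified
    }


    def validate_mailbox_ids(current_user_id: int, mailbox_ids: List[str]) -> Optional[List[MailboxRecord]]:
        """Return the mailboxes if every id is owned by the user and verified, else None."""
        mailboxes = []
        for mailbox_id in mailbox_ids:
            mailbox = MAILBOXES.get(int(mailbox_id))
            if not mailbox or mailbox.user_id != current_user_id or not mailbox.verified:
                return None  # the view flashes "Something went wrong, please retry"
            mailboxes.append(mailbox)
        return mailboxes or None  # the view also rejects an empty selection


    print(validate_mailbox_ids(7, ["1"]))       # a one-element list
    print(validate_mailbox_ids(7, ["1", "2"]))  # None: mailbox 2 is not owned by user 7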
@@ -11,7 +11,7 @@ from app.dashboard.base import dashboard_bp
 from app.extensions import limiter
 from app.log import LOG
 from app.models import PartnerUser, SocialAuth
-from app.proton.proton_partner import get_proton_partner
+from app.proton.utils import get_proton_partner
 from app.utils import sanitize_next_url
 
 _SUDO_GAP = 120
@@ -71,10 +71,7 @@ def index():
 
     page = 0
     if request.args.get("page"):
-        try:
-            page = int(request.args.get("page"))
-        except ValueError:
-            pass
+        page = int(request.args.get("page"))
 
     highlight_alias_id = None
     if request.args.get("highlight_alias_id"):
@@ -152,9 +149,7 @@ def index():
                 )
                 flash(f"Alias {email} has been deleted", "success")
             elif request.form.get("form-name") == "disable-alias":
-                alias_utils.change_alias_status(
-                    alias, enabled=False, message="Set enabled=False from dashboard"
-                )
+                alias_utils.change_alias_status(alias, enabled=False)
                 Session.commit()
                 flash(f"Alias {alias.email} has been disabled", "success")
 
@@ -3,9 +3,11 @@ from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators
 
-from app import parallel_limiter
-from app.coupon_utils import redeem_lifetime_coupon
+from app.config import ADMIN_EMAIL
 from app.dashboard.base import dashboard_bp
+from app.db import Session
+from app.email_utils import send_email
+from app.models import LifetimeCoupon
 
 
 class CouponForm(FlaskForm):
@@ -14,7 +16,6 @@ class CouponForm(FlaskForm):
 
 @dashboard_bp.route("/lifetime_licence", methods=["GET", "POST"])
 @login_required
-@parallel_limiter.lock()
 def lifetime_licence():
     if current_user.lifetime:
         flash("You already have a lifetime licence", "warning")
@@ -31,12 +32,28 @@ def lifetime_licence():
 
     if coupon_form.validate_on_submit():
         code = coupon_form.code.data
-        coupon = redeem_lifetime_coupon(code, current_user)
-        if coupon:
+        coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=code)
+        if coupon and coupon.nb_used > 0:
+            coupon.nb_used -= 1
+            current_user.lifetime = True
+            current_user.lifetime_coupon_id = coupon.id
+            if coupon.paid:
+                current_user.paid_lifetime = True
+            Session.commit()
+
+            # notify admin
+            send_email(
+                ADMIN_EMAIL,
+                subject=f"User {current_user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
+                plaintext="",
+                html="",
+            )
+
             flash("You are upgraded to lifetime premium!", "success")
             return redirect(url_for("dashboard.index"))
 
         else:
-            flash("Coupon code expired or invalid", "warning")
+            flash(f"Code *{code}* expired or invalid", "warning")
 
     return render_template("dashboard/lifetime_licence.html", coupon_form=coupon_form)
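Note: stripped of the Flask and database plumbing, the 4.49.8-side redemption rule shown above reduces to a small pure function. The sketch below uses stand-in dataclasses, not the app's models; field names mirror the diff.

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class Coupon:
        id: int
        nb_used: int  # treated as remaining redemptions in the code above
        paid: bool


    @dataclass
    class Account:
        lifetime: bool = False
        paid_lifetime: bool = False
        lifetime_coupon_id: Optional[int] = None


    def redeem(coupon: Optional[Coupon], account: Account) -> bool:
        # mirror the check `if coupon and coupon.nb_used > 0`
        if not coupon or coupon.nb_used <= 0:
            return False  # the view flashes "expired or invalid"
        coupon.nb_used -= 1
        account.lifetime = True
        account.lifetime_coupon_id = coupon.id
        if coupon.paid:
            account.paid_lifetime = True
        return True


    acc = Account()
    print(redeem(Coupon(id=1, nb_used=1, paid=True), acc), acc)  # True; lifetime and paid_lifetime set
    print(redeem(Coupon(id=2, nb_used=0, paid=False), acc))      # False: no uses left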
@@ -1,7 +1,6 @@
 import base64
 import binascii
 import json
-from typing import Optional
 
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
@@ -16,7 +15,6 @@ from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.log import LOG
 from app.models import Mailbox
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import CSRFValidationForm
 
 
@@ -121,22 +119,11 @@ def mailbox_route():
 @login_required
 def mailbox_verify():
     mailbox_id = request.args.get("mailbox_id")
-    if not mailbox_id:
-        LOG.i("Missing mailbox_id")
-        flash("You followed an invalid link", "error")
-        return redirect(url_for("dashboard.mailbox_route"))
-
     code = request.args.get("code")
     if not code:
         # Old way
         return verify_with_signed_secret(mailbox_id)
-
-    try:
-        mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
-    except mailbox_utils.MailboxError as e:
-        LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
-        flash(f"Cannot verify mailbox: {e.msg}", "error")
-        return redirect(url_for("dashboard.mailbox_route"))
+    mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
     LOG.d("Mailbox %s is verified", mailbox)
     return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
 
@@ -159,7 +146,7 @@ def verify_with_signed_secret(request: str):
         flash("Invalid link. Please delete and re-add your mailbox", "error")
         return redirect(url_for("dashboard.mailbox_route"))
     mailbox_id = mailbox_data[0]
-    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
+    mailbox = Mailbox.get(mailbox_id)
     if not mailbox:
         flash("Invalid link", "error")
         return redirect(url_for("dashboard.mailbox_route"))
@@ -169,11 +156,6 @@ def verify_with_signed_secret(request: str):
         return redirect(url_for("dashboard.mailbox_route"))
 
     mailbox.verified = True
-    emit_user_audit_log(
-        user=current_user,
-        action=UserAuditLogAction.VerifyMailbox,
-        message=f"Verified mailbox {mailbox.id} ({mailbox.email})",
-    )
     Session.commit()
 
     LOG.d("Mailbox %s is verified", mailbox)
@@ -1,31 +1,30 @@
+from smtplib import SMTPRecipientsRefused
+
 from email_validator import validate_email, EmailNotValidError
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from itsdangerous import TimestampSigner
 from wtforms import validators
-from wtforms.fields.simple import StringField
+from wtforms.fields.html5 import EmailField
 
-from app import mailbox_utils
 from app.config import ENFORCE_SPF, MAILBOX_SECRET
+from app.config import URL
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
+from app.email_utils import email_can_be_used_as_mailbox
+from app.email_utils import mailbox_already_used, render, send_email
 from app.extensions import limiter
-from app.mailbox_utils import (
-    perform_mailbox_email_change,
-    MailboxEmailChangeError,
-    MailboxError,
-)
-from app.models import AuthorizedAddress
+from app.log import LOG
+from app.models import Alias, AuthorizedAddress
 from app.models import Mailbox
 from app.pgp_utils import PGPException, load_public_key_and_check
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_email, CSRFValidationForm
 
 
 class ChangeEmailForm(FlaskForm):
-    email = StringField(
+    email = EmailField(
         "email", validators=[validators.DataRequired(), validators.Email()]
     )
 
@@ -56,16 +55,31 @@ def mailbox_detail_route(mailbox_id):
             request.form.get("form-name") == "update-email"
             and change_email_form.validate_on_submit()
         ):
+            new_email = sanitize_email(change_email_form.email.data)
+            if new_email != mailbox.email and not pending_email:
+                # check if this email is not already used
+                if mailbox_already_used(new_email, current_user) or Alias.get_by(
+                    email=new_email
+                ):
+                    flash(f"Email {new_email} already used", "error")
+                elif not email_can_be_used_as_mailbox(new_email):
+                    flash("You cannot use this email address as your mailbox", "error")
+                else:
+                    mailbox.new_email = new_email
+                    Session.commit()
+
             try:
-                response = mailbox_utils.request_mailbox_email_change(
-                    current_user, mailbox, change_email_form.email.data
-                )
+                verify_mailbox_change(current_user, mailbox, new_email)
+            except SMTPRecipientsRefused:
                 flash(
-                    f"You are going to receive an email to confirm {mailbox.email}.",
+                    f"Incorrect mailbox, please recheck {mailbox.email}",
+                    "error",
+                )
+            else:
+                flash(
+                    f"You are going to receive an email to confirm {new_email}.",
                     "success",
                 )
-            except mailbox_utils.MailboxError as e:
-                flash(e.msg, "error")
             return redirect(
                 url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
             )
@@ -74,12 +88,8 @@ def mailbox_detail_route(mailbox_id):
                 flash("SPF enforcement globally not enabled", "error")
                 return redirect(url_for("dashboard.index"))
 
-            force_spf_value = request.form.get("spf-status") == "on"
-            mailbox.force_spf = force_spf_value
-            emit_user_audit_log(
-                user=current_user,
-                action=UserAuditLogAction.UpdateMailbox,
-                message=f"Set force_spf to {force_spf_value} on mailbox {mailbox_id} ({mailbox.email})",
+            mailbox.force_spf = (
+                True if request.form.get("spf-status") == "on" else False
             )
             Session.commit()
             flash(
@@ -103,11 +113,6 @@ def mailbox_detail_route(mailbox_id):
             if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
                 flash(f"{address} already added", "error")
             else:
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Add authorized address {address} to mailbox {mailbox_id} ({mailbox.email})",
-                )
                 AuthorizedAddress.create(
                     user_id=current_user.id,
                     mailbox_id=mailbox.id,
@@ -128,11 +133,6 @@ def mailbox_detail_route(mailbox_id):
                 flash("Unknown error. Refresh the page", "warning")
             else:
                 address = authorized_address.email
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Remove authorized address {address} from mailbox {mailbox_id} ({mailbox.email})",
-                )
                 AuthorizedAddress.delete(authorized_address_id)
                 Session.commit()
                 flash(f"{address} has been deleted", "success")
@@ -165,11 +165,6 @@ def mailbox_detail_route(mailbox_id):
                 except PGPException:
                     flash("Cannot add the public key, please verify it", "error")
                 else:
-                    emit_user_audit_log(
-                        user=current_user,
-                        action=UserAuditLogAction.UpdateMailbox,
-                        message=f"Add PGP Key {mailbox.pgp_finger_print} to mailbox {mailbox_id} ({mailbox.email})",
-                    )
                     Session.commit()
                     flash("Your PGP public key is saved successfully", "success")
                     return redirect(
@@ -177,11 +172,6 @@ def mailbox_detail_route(mailbox_id):
                     )
             elif request.form.get("action") == "remove":
                 # Free user can decide to remove their added PGP key
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Remove PGP Key {mailbox.pgp_finger_print} from mailbox {mailbox_id} ({mailbox.email})",
-                )
                 mailbox.pgp_public_key = None
                 mailbox.pgp_finger_print = None
                 mailbox.disable_pgp = False
@@ -201,19 +191,9 @@ def mailbox_detail_route(mailbox_id):
                 )
             else:
                 mailbox.disable_pgp = False
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Enabled PGP for mailbox {mailbox_id} ({mailbox.email})",
-                )
                 flash(f"PGP is enabled on {mailbox.email}", "info")
             else:
                 mailbox.disable_pgp = True
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Disabled PGP for mailbox {mailbox_id} ({mailbox.email})",
-                )
                 flash(f"PGP is disabled on {mailbox.email}", "info")
 
             Session.commit()
@@ -223,11 +203,6 @@ def mailbox_detail_route(mailbox_id):
         elif request.form.get("form-name") == "generic-subject":
             if request.form.get("action") == "save":
                 mailbox.generic_subject = request.form.get("generic-subject")
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Set generic subject for mailbox {mailbox_id} ({mailbox.email})",
-                )
                 Session.commit()
                 flash("Generic subject is enabled", "success")
                 return redirect(
@@ -235,11 +210,6 @@ def mailbox_detail_route(mailbox_id):
                 )
             elif request.form.get("action") == "remove":
                 mailbox.generic_subject = None
-                emit_user_audit_log(
-                    user=current_user,
-                    action=UserAuditLogAction.UpdateMailbox,
-                    message=f"Remove generic subject for mailbox {mailbox_id} ({mailbox.email})",
-                )
                 Session.commit()
                 flash("Generic subject is disabled", "success")
                 return redirect(
@@ -250,57 +220,91 @@ def mailbox_detail_route(mailbox_id):
     return render_template("dashboard/mailbox_detail.html", **locals())
 
 
+def verify_mailbox_change(user, mailbox, new_email):
+    s = TimestampSigner(MAILBOX_SECRET)
+    mailbox_id_signed = s.sign(str(mailbox.id)).decode()
+    verification_url = (
+        f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
+    )
+
+    send_email(
+        new_email,
+        "Confirm mailbox change on SimpleLogin",
+        render(
+            "transactional/verify-mailbox-change.txt.jinja2",
+            user=user,
+            link=verification_url,
+            mailbox_email=mailbox.email,
+            mailbox_new_email=new_email,
+        ),
+        render(
+            "transactional/verify-mailbox-change.html",
+            user=user,
+            link=verification_url,
+            mailbox_email=mailbox.email,
+            mailbox_new_email=new_email,
+        ),
+    )
+
+
 @dashboard_bp.route(
     "/mailbox/<int:mailbox_id>/cancel_email_change", methods=["GET", "POST"]
 )
 @login_required
 def cancel_mailbox_change_route(mailbox_id):
-    try:
-        mailbox_utils.cancel_email_change(mailbox_id, current_user)
+    mailbox = Mailbox.get(mailbox_id)
+    if not mailbox or mailbox.user_id != current_user.id:
+        flash("You cannot see this page", "warning")
+        return redirect(url_for("dashboard.index"))
+
+    if mailbox.new_email:
+        mailbox.new_email = None
+        Session.commit()
         flash("Your mailbox change is cancelled", "success")
         return redirect(
             url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
        )
-    except MailboxError as e:
-        flash(e.msg, "warning")
-        return redirect(url_for("dashboard.index"))
 
 
-@dashboard_bp.route("/mailbox/confirm_change")
-@login_required
-@limiter.limit("3/minute")
-def mailbox_confirm_email_change_route():
-    mailbox_id = request.args.get("mailbox_id")
-
-    code = request.args.get("code")
-    if code:
-        try:
-            mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
-            flash("Successfully changed mailbox email", "success")
-            return redirect(
-                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
-            )
-        except mailbox_utils.MailboxError as e:
-            flash(f"Cannot verify mailbox: {e.msg}", "error")
-            return redirect(url_for("dashboard.mailbox_route"))
     else:
-        s = TimestampSigner(MAILBOX_SECRET)
-        try:
-            mailbox_id = int(s.unsign(mailbox_id, max_age=900))
-            res = perform_mailbox_email_change(mailbox_id)
-            flash(res.message, res.message_category)
-            if res.error:
-                if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
+        flash("You have no pending mailbox change", "warning")
         return redirect(
             url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
        )
-                elif res.error == MailboxEmailChangeError.InvalidId:
-                    return redirect(url_for("dashboard.index"))
-                else:
-                    raise Exception("Unhandled MailboxEmailChangeError")
+
+
+@dashboard_bp.route("/mailbox/confirm_change")
+def mailbox_confirm_change_route():
+    s = TimestampSigner(MAILBOX_SECRET)
+    signed_mailbox_id = request.args.get("mailbox_id")
+
+    try:
+        mailbox_id = int(s.unsign(signed_mailbox_id, max_age=900))
     except Exception:
         flash("Invalid link", "error")
         return redirect(url_for("dashboard.index"))
-
-    flash("Successfully changed mailbox email", "success")
-    return redirect(url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id))
+    else:
+        mailbox = Mailbox.get(mailbox_id)
+
+        # new_email can be None if user cancels change in the meantime
+        if mailbox and mailbox.new_email:
+            user = mailbox.user
+            if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
+                flash(f"{mailbox.new_email} is already used", "error")
+                return redirect(
+                    url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
+                )
+
+            mailbox.email = mailbox.new_email
+            mailbox.new_email = None
+
+            # mark mailbox as verified if the change request is sent from an unverified mailbox
+            mailbox.verified = True
+            Session.commit()
+
+            LOG.d("Mailbox change %s is verified", mailbox)
+            flash(f"The {mailbox.email} is updated", "success")
+            return redirect(
                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
+            )
+        else:
+            flash("Invalid link", "error")
+            return redirect(url_for("dashboard.index"))
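Note: both sides of this file rely on itsdangerous' TimestampSigner to protect the mailbox-change confirmation link. A minimal standalone sketch of that round trip, with a made-up secret and mailbox id (the app reads MAILBOX_SECRET from its config):

    from itsdangerous import TimestampSigner, SignatureExpired, BadSignature

    MAILBOX_SECRET = "change-me"  # stand-in for the app's MAILBOX_SECRET setting

    signer = TimestampSigner(MAILBOX_SECRET)

    # Signing: what verify_mailbox_change() embeds in the confirmation URL.
    signed_mailbox_id = signer.sign(str(42)).decode()
    print("query parameter:", signed_mailbox_id)

    # Verifying: what the confirm route does, rejecting links older than 15 minutes.
    try:
        mailbox_id = int(signer.unsign(signed_mailbox_id, max_age=900))
        print("confirmed mailbox", mailbox_id)
    except SignatureExpired:
        print("link expired")
    except BadSignature:
        print("invalid link")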
@@ -43,10 +43,7 @@ def notification_route(notification_id):
 def notifications_route():
     page = 0
     if request.args.get("page"):
-        try:
-            page = int(request.args.get("page"))
-        except ValueError:
-            pass
+        page = int(request.args.get("page"))
 
     notifications = (
         Notification.filter_by(user_id=current_user.id)
@@ -22,7 +22,7 @@ from app.models import (
     PartnerUser,
     PartnerSubscription,
 )
-from app.proton.proton_partner import get_proton_partner
+from app.proton.utils import get_proton_partner
 
 
 @dashboard_bp.route("/pricing", methods=["GET", "POST"])
@@ -41,8 +41,7 @@ from app.models import (
     PartnerSubscription,
     UnsubscribeBehaviourEnum,
 )
-from app.proton.proton_partner import get_proton_partner
-from app.proton.proton_unlink import can_unlink_proton_account
+from app.proton.utils import get_proton_partner
 from app.utils import (
     random_string,
     CSRFValidationForm,
@@ -175,12 +174,7 @@ def setting():
             flash("Your preference has been updated", "success")
             return redirect(url_for("dashboard.setting"))
         elif request.form.get("form-name") == "random-alias-suffix":
-            try:
-                scheme = int(request.form.get("random-alias-suffix-generator"))
-            except ValueError:
-                flash("Invalid value", "error")
-                return redirect(url_for("dashboard.setting"))
-
+            scheme = int(request.form.get("random-alias-suffix-generator"))
             if AliasSuffixEnum.has_value(scheme):
                 current_user.random_alias_suffix = scheme
                 Session.commit()
@@ -324,5 +318,4 @@ def setting():
         ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
         connect_with_proton=CONNECT_WITH_PROTON,
         proton_linked_account=proton_linked_account,
-        can_unlink_proton_account=can_unlink_proton_account(current_user),
     )
@@ -11,7 +11,6 @@ from app.dashboard.base import dashboard_bp
 from app.errors import SubdomainInTrashError
 from app.log import LOG
 from app.models import CustomDomain, Mailbox, SLDomain
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 
 # Only lowercase letters, numbers, dashes (-) are currently supported
 _SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"
@@ -103,12 +102,6 @@ def subdomain_route():
                 ownership_verified=True,
                 commit=True,
             )
-            emit_user_audit_log(
-                user=current_user,
-                action=UserAuditLogAction.CreateCustomDomain,
-                message=f"Create subdomain {new_custom_domain.id} ({full_domain})",
-                commit=True,
-            )
         except SubdomainInTrashError:
             flash(
                 f"{full_domain} has been used before and cannot be reused",
@@ -32,9 +32,7 @@ def unsubscribe(alias_id):
 
     # automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
     if request.method == "POST":
-        alias_utils.change_alias_status(
-            alias, enabled=False, message="Set enabled=False from unsubscribe request"
-        )
+        alias_utils.change_alias_status(alias, False)
         flash(f"Alias {alias.email} has been blocked", "success")
         Session.commit()
 
@@ -1,5 +1,4 @@
 """List of clients"""
-
 from flask import render_template
 from flask_login import current_user, login_required
 
@@ -1,134 +1,120 @@
-from abc import ABC, abstractmethod
-from typing import List, Optional
+from app import config
+from typing import Optional, List, Tuple
 
 import dns.resolver
 
-from app.config import NAMESERVERS
+
+def _get_dns_resolver():
+    my_resolver = dns.resolver.Resolver()
+    my_resolver.nameservers = config.NAMESERVERS
+
+    return my_resolver
+
+
+def get_ns(hostname) -> [str]:
+    try:
+        answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
+    except Exception:
+        return []
+    return [a.to_text() for a in answers]
+
+
+def get_cname_record(hostname) -> Optional[str]:
+    """Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
+    try:
+        answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
+    except Exception:
+        return None
+
+    for a in answers:
+        ret = a.to_text()
+        return ret[:-1]
+
+    return None
+
+
+def get_mx_domains(hostname) -> [(int, str)]:
+    """return list of (priority, domain name) sorted by priority (lowest priority first)
+    domain name ends with a "." at the end.
+    """
+    try:
+        answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
+    except Exception:
+        return []
+
+    ret = []
+
+    for a in answers:
+        record = a.to_text()  # for ex '20 alt2.aspmx.l.google.com.'
+        parts = record.split(" ")
+
+        ret.append((int(parts[0]), parts[1]))
+
+    return sorted(ret, key=lambda prio_domain: prio_domain[0])
+
 
 _include_spf = "include:"
 
 
-class DNSClient(ABC):
-    @abstractmethod
-    def get_cname_record(self, hostname: str) -> Optional[str]:
-        pass
-
-    @abstractmethod
-    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
-        pass
-
-    def get_spf_domain(self, hostname: str) -> List[str]:
-        """
-        return all domains listed in *include:*
-        """
+def get_spf_domain(hostname) -> [str]:
+    """return all domains listed in *include:*"""
     try:
-            records = self.get_txt_record(hostname)
+        answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
+    except Exception:
+        return []
+
     ret = []
-        for record in records:
+
+    for a in answers:  # type: dns.rdtypes.ANY.TXT.TXT
+        for record in a.strings:
+            record = record.decode()  # record is bytes
+
             if record.startswith("v=spf1"):
                 parts = record.split(" ")
                 for part in parts:
                     if part.startswith(_include_spf):
-                        ret.append(
-                            part[part.find(_include_spf) + len(_include_spf) :]
-                        )
+                        ret.append(part[part.find(_include_spf) + len(_include_spf) :])
    return ret
 
 
+def get_txt_record(hostname) -> [str]:
+    try:
+        answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
    except Exception:
        return []
 
-    @abstractmethod
-    def get_txt_record(self, hostname: str) -> List[str]:
-        pass
-
-
-class NetworkDNSClient(DNSClient):
-    def __init__(self, nameservers: List[str]):
-        self._resolver = dns.resolver.Resolver()
-        self._resolver.nameservers = nameservers
-
-    def get_cname_record(self, hostname: str) -> Optional[str]:
-        """
-        Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end
-        """
-        try:
-            answers = self._resolver.resolve(hostname, "CNAME", search=True)
-            for a in answers:
-                ret = a.to_text()
-                return ret[:-1]
-        except Exception:
-            return None
-
-    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
-        """
-        return list of (priority, domain name) sorted by priority (lowest priority first)
-        domain name ends with a "." at the end.
-        """
-        ret = {}
-        try:
-            answers = self._resolver.resolve(hostname, "MX", search=True)
-            for a in answers:
-                record = a.to_text()  # for ex '20 alt2.aspmx.l.google.com.'
-                parts = record.split(" ")
-                prio = int(parts[0])
-                if prio not in ret:
-                    ret[prio] = []
-                ret[prio].append(parts[1])
-        except Exception:
-            pass
-        return ret
-
-    def get_txt_record(self, hostname: str) -> List[str]:
-        try:
-            answers = self._resolver.resolve(hostname, "TXT", search=False)
    ret = []
 
    for a in answers:  # type: dns.rdtypes.ANY.TXT.TXT
        for record in a.strings:
-            ret.append(record.decode())
+            record = record.decode()  # record is bytes
+
+            ret.append(record)
+
    return ret
-        except Exception:
-            return []
 
 
-class InMemoryDNSClient(DNSClient):
-    def __init__(self):
-        self.cname_records: dict[str, Optional[str]] = {}
-        self.mx_records: dict[int, dict[int, list[str]]] = {}
-        self.spf_records: dict[str, List[str]] = {}
-        self.txt_records: dict[str, List[str]] = {}
+def is_mx_equivalent(
+    mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
+) -> bool:
+    """
+    Compare mx_domains with ref_mx_domains to see if they are equivalent.
+    mx_domains and ref_mx_domains are list of (priority, domain)
 
-    def set_cname_record(self, hostname: str, cname: str):
-        self.cname_records[hostname] = cname
-
-    def set_mx_records(self, hostname: str, mx_list: dict[int, list[str]]):
-        self.mx_records[hostname] = mx_list
+    The priority order is taken into account but not the priority number.
+    For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
+    """
+    mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
+    ref_mx_domains = sorted(
+        ref_mx_domains, key=lambda priority_domain: priority_domain[0]
+    )
+
+    if len(mx_domains) < len(ref_mx_domains):
+        return False
 
-    def set_txt_record(self, hostname: str, txt_list: List[str]):
-        self.txt_records[hostname] = txt_list
-
-    def get_cname_record(self, hostname: str) -> Optional[str]:
-        return self.cname_records.get(hostname)
-
-    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
-        return self.mx_records.get(hostname, {})
-
-    def get_txt_record(self, hostname: str) -> List[str]:
-        return self.txt_records.get(hostname, [])
-
-
-global_dns_client: Optional[DNSClient] = None
-
-
-def get_network_dns_client() -> DNSClient:
-    global global_dns_client
-    if global_dns_client is not None:
-        return global_dns_client
-    return NetworkDNSClient(NAMESERVERS)
-
-
-def set_global_dns_client(dns_client: Optional[DNSClient]):
-    global global_dns_client
-    global_dns_client = dns_client
-
-
-def get_mx_domains(hostname: str) -> dict[int, list[str]]:
-    return get_network_dns_client().get_mx_domains(hostname)
+    for i in range(0, len(ref_mx_domains)):
+        if mx_domains[i][1] != ref_mx_domains[i][1]:
+            return False
+
+    return True
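Note: on the main side MX lookups come back as a priority-to-domains mapping (dict[int, list[str]]) rather than the 4.49.8 list of (priority, domain) tuples. A small standalone sketch of that shape using dnspython directly; the nameserver and hostname below are examples, not values from the repository.

    import dns.resolver


    def mx_by_priority(hostname: str) -> dict[int, list[str]]:
        resolver = dns.resolver.Resolver()
        resolver.nameservers = ["1.1.1.1"]  # stand-in for the NAMESERVERS setting
        ret: dict[int, list[str]] = {}
        try:
            answers = resolver.resolve(hostname, "MX", search=True)
        except Exception:
            return ret
        for a in answers:
            prio, domain = a.to_text().split(" ")  # e.g. '10 mx1.example.com.'
            ret.setdefault(int(prio), []).append(domain)
        return ret


    if __name__ == "__main__":
        print(mx_by_priority("gmail.com"))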
@@ -1,5 +1,4 @@
 """Email headers"""
-
 MESSAGE_ID = "Message-ID"
 IN_REPLY_TO = "In-Reply-To"
 REFERENCES = "References"
@@ -592,7 +592,7 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
 
     from app.models import CustomDomain
 
-    if CustomDomain.get_by(domain=domain, is_sl_subdomain=True, verified=True):
+    if CustomDomain.get_by(domain=domain, verified=True):
         LOG.d("domain %s is a SimpleLogin custom domain", domain)
         return False
 
@@ -657,11 +657,7 @@ def get_mx_domain_list(domain) -> [str]:
     """
     priority_domains = get_mx_domains(domain)
 
-    mx_domains = []
-    for prio in priority_domains:
-        for domain in priority_domains[prio]:
-            mx_domains.append(domain[:-1])
-    return mx_domains
+    return [d[:-1] for _, d in priority_domains]
 
 
 def personal_email_already_used(email_address: str) -> bool:
@@ -1349,18 +1345,17 @@ def get_queue_id(msg: Message) -> Optional[str]:
 
     received_header = str(msg[headers.RECEIVED])
     if not received_header:
-        return None
+        return
 
     # received_header looks like 'from mail-wr1-x434.google.com (mail-wr1-x434.google.com [IPv6:2a00:1450:4864:20::434])\r\n\t(using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits))\r\n\t(No client certificate requested)\r\n\tby mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n\tfor <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)'
-    search_result = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
-    if search_result:
-        return search_result.group(1)
-    search_result = re.search(
-        r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", received_header
-    )
-    if search_result:
-        return search_result.group(1)
-    return None
+    search_result = re.search("with ESMTPS id [0-9a-zA-Z]{1,}", received_header)
+    if not search_result:
+        return
+
+    # the "with ESMTPS id 4FxQmw1DXdz2vK2" part
+    with_esmtps = received_header[search_result.start() : search_result.end()]
+
+    return with_esmtps[len("with ESMTPS id ") :]
 
 
 def should_ignore_bounce(mail_from: str) -> bool:
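Note: both get_queue_id variants above extract the Postfix queue id from the same kind of Received header. A quick standalone check of the two patterns, run against the relevant part of the sample header quoted in the diff's comment:

    import re

    received_header = (
        "by mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n"
        "\tfor <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)"
    )

    # main side: a capture group that also accepts SMTP/ESMTP/ESMTPA variants
    m = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
    print(m.group(1) if m else None)  # -> 4FxQmw1DXdz2vK2

    # 4.49.8 side: match the whole "with ESMTPS id ..." phrase, then strip the fixed prefix
    m = re.search("with ESMTPS id [0-9a-zA-Z]{1,}", received_header)
    if m:
        with_esmtps = received_header[m.start() : m.end()]
        print(with_esmtps[len("with ESMTPS id ") :])  # -> 4FxQmw1DXdz2vK2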
@@ -8,7 +8,7 @@ from app.errors import ProtonPartnerNotSetUp
 from app.events.generated import event_pb2
 from app.log import LOG
 from app.models import User, PartnerUser, SyncEvent
-from app.proton.proton_partner import get_proton_partner
+from app.proton.utils import get_proton_partner
 from typing import Optional
 
 NOTIFICATION_CHANNEL = "simplelogin_sync_events"
@@ -30,30 +30,14 @@ class PostgresDispatcher(Dispatcher):
         return PostgresDispatcher()
 
 
-class GlobalDispatcher:
-    __dispatcher: Optional[Dispatcher] = None
-
-    @staticmethod
-    def get_dispatcher() -> Dispatcher:
-        if not GlobalDispatcher.__dispatcher:
-            GlobalDispatcher.__dispatcher = PostgresDispatcher.get()
-        return GlobalDispatcher.__dispatcher
-
-    @staticmethod
-    def set_dispatcher(dispatcher: Optional[Dispatcher]):
-        GlobalDispatcher.__dispatcher = dispatcher
-
-
 class EventDispatcher:
     @staticmethod
     def send_event(
         user: User,
         content: event_pb2.EventContent,
-        dispatcher: Optional[Dispatcher] = None,
+        dispatcher: Dispatcher = PostgresDispatcher.get(),
         skip_if_webhook_missing: bool = True,
     ):
-        if dispatcher is None:
-            dispatcher = GlobalDispatcher.get_dispatcher()
         if config.EVENT_WEBHOOK_DISABLE:
             LOG.i("Not sending events because webhook is disabled")
             return
@@ -64,9 +48,15 @@ class EventDispatcher:
             )
             return
 
+        if config.EVENT_WEBHOOK_ENABLED_USER_IDS is not None:
+            if user.id not in config.EVENT_WEBHOOK_ENABLED_USER_IDS:
+                return
+
         partner_user = EventDispatcher.__partner_user(user.id)
         if not partner_user:
-            LOG.i(f"Not sending events because there's no partner user for user {user}")
+            LOG.i(
+                f"Not sending events because there's no partner user for user {user}"
+            )
             return
 
         event = event_pb2.Event(
@@ -78,9 +68,7 @@ class EventDispatcher:
 
         serialized = event.SerializeToString()
         dispatcher.send(serialized)
-        event_type = content.WhichOneof("content")
-        newrelic.agent.record_custom_event("EventStoredToDb", {"type": event_type})
+        newrelic.agent.record_custom_metric("Custom/events_stored", 1)
         LOG.i("Sent event to the dispatcher")
 
     @staticmethod
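Note: the 4.49.8 signature evaluates PostgresDispatcher.get() once at import time as a default argument, whereas the main side defaults to None and resolves a process-wide dispatcher per call, which lets tests swap in a capturing one. A standalone sketch of that pattern with stand-in classes (not the app's Dispatcher types):

    from typing import List, Optional


    class MemoryDispatcher:
        """Collects serialized payloads instead of writing them to Postgres."""

        def __init__(self):
            self.events: List[bytes] = []

        def send(self, payload: bytes):
            self.events.append(payload)


    class GlobalDispatcher:
        _dispatcher: Optional[MemoryDispatcher] = None

        @staticmethod
        def set_dispatcher(dispatcher):
            GlobalDispatcher._dispatcher = dispatcher

        @staticmethod
        def get_dispatcher():
            if GlobalDispatcher._dispatcher is None:
                # the real app would fall back to its Postgres-backed dispatcher here
                GlobalDispatcher._dispatcher = MemoryDispatcher()
            return GlobalDispatcher._dispatcher


    def send_event(payload: bytes, dispatcher=None):
        # resolved at call time, mirroring the `if dispatcher is None` branch above
        if dispatcher is None:
            dispatcher = GlobalDispatcher.get_dispatcher()
        dispatcher.send(payload)


    capture = MemoryDispatcher()
    GlobalDispatcher.set_dispatcher(capture)
    send_event(b"serialized-event")
    assert capture.events == [b"serialized-event"]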
@@ -24,7 +24,7 @@ _sym_db = _symbol_database.Default()
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\":\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\x12\x10\n\x08lifetime\x18\x02 \x01(\x08\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x0e\n\x0cUserUnlinked\"\xce\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x12\x39\n\ruser_unlinked\x18\x07 \x01(\x0b\x32 .simplelogin_events.UserUnlinkedH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
 
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -32,21 +32,19 @@ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
   DESCRIPTOR._loaded_options = None
   _globals['_USERPLANCHANGED']._serialized_start=35
-  _globals['_USERPLANCHANGED']._serialized_end=93
-  _globals['_USERDELETED']._serialized_start=95
-  _globals['_USERDELETED']._serialized_end=108
-  _globals['_ALIASCREATED']._serialized_start=110
-  _globals['_ALIASCREATED']._serialized_end=202
-  _globals['_ALIASSTATUSCHANGED']._serialized_start=204
-  _globals['_ALIASSTATUSCHANGED']._serialized_end=288
-  _globals['_ALIASDELETED']._serialized_start=290
-  _globals['_ALIASDELETED']._serialized_end=331
-  _globals['_ALIASCREATEDLIST']._serialized_start=333
-  _globals['_ALIASCREATEDLIST']._serialized_end=401
-  _globals['_USERUNLINKED']._serialized_start=403
-  _globals['_USERUNLINKED']._serialized_end=417
-  _globals['_EVENTCONTENT']._serialized_start=420
-  _globals['_EVENTCONTENT']._serialized_end=882
-  _globals['_EVENT']._serialized_start=884
-  _globals['_EVENT']._serialized_end=1005
+  _globals['_USERPLANCHANGED']._serialized_end=75
+  _globals['_USERDELETED']._serialized_start=77
+  _globals['_USERDELETED']._serialized_end=90
+  _globals['_ALIASCREATED']._serialized_start=92
+  _globals['_ALIASCREATED']._serialized_end=182
+  _globals['_ALIASSTATUSCHANGED']._serialized_start=184
+  _globals['_ALIASSTATUSCHANGED']._serialized_end=260
+  _globals['_ALIASDELETED']._serialized_start=262
+  _globals['_ALIASDELETED']._serialized_end=315
+  _globals['_ALIASCREATEDLIST']._serialized_start=317
+  _globals['_ALIASCREATEDLIST']._serialized_end=385
+  _globals['_EVENTCONTENT']._serialized_start=388
+  _globals['_EVENTCONTENT']._serialized_end=791
+  _globals['_EVENT']._serialized_start=793
+  _globals['_EVENT']._serialized_end=914
 # @@protoc_insertion_point(module_scope)
|
@ -6,50 +6,44 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
|
|||||||
DESCRIPTOR: _descriptor.FileDescriptor
|
DESCRIPTOR: _descriptor.FileDescriptor
|
||||||
|
|
||||||
class UserPlanChanged(_message.Message):
|
class UserPlanChanged(_message.Message):
|
||||||
__slots__ = ("plan_end_time", "lifetime")
|
__slots__ = ("plan_end_time",)
|
||||||
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
|
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
|
||||||
LIFETIME_FIELD_NUMBER: _ClassVar[int]
|
|
||||||
plan_end_time: int
|
plan_end_time: int
|
||||||
lifetime: bool
|
def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
|
||||||
-def __init__(self, plan_end_time: _Optional[int] = ..., lifetime: bool = ...) -> None: ...

 class UserDeleted(_message.Message):
 __slots__ = ()
 def __init__(self) -> None: ...

 class AliasCreated(_message.Message):
-__slots__ = ("id", "email", "note", "enabled", "created_at")
-ID_FIELD_NUMBER: _ClassVar[int]
-EMAIL_FIELD_NUMBER: _ClassVar[int]
-NOTE_FIELD_NUMBER: _ClassVar[int]
+__slots__ = ("alias_id", "alias_email", "alias_note", "enabled")
+ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
+ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
+ALIAS_NOTE_FIELD_NUMBER: _ClassVar[int]
 ENABLED_FIELD_NUMBER: _ClassVar[int]
-CREATED_AT_FIELD_NUMBER: _ClassVar[int]
-id: int
-email: str
-note: str
+alias_id: int
+alias_email: str
+alias_note: str
 enabled: bool
-created_at: int
-def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., note: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
+def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...

 class AliasStatusChanged(_message.Message):
-__slots__ = ("id", "email", "enabled", "created_at")
-ID_FIELD_NUMBER: _ClassVar[int]
-EMAIL_FIELD_NUMBER: _ClassVar[int]
+__slots__ = ("alias_id", "alias_email", "enabled")
+ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
+ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
 ENABLED_FIELD_NUMBER: _ClassVar[int]
-CREATED_AT_FIELD_NUMBER: _ClassVar[int]
-id: int
-email: str
+alias_id: int
+alias_email: str
 enabled: bool
-created_at: int
-def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
+def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., enabled: bool = ...) -> None: ...

 class AliasDeleted(_message.Message):
-__slots__ = ("id", "email")
-ID_FIELD_NUMBER: _ClassVar[int]
-EMAIL_FIELD_NUMBER: _ClassVar[int]
-id: int
-email: str
-def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ...) -> None: ...
+__slots__ = ("alias_id", "alias_email")
+ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
+ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
+alias_id: int
+alias_email: str
+def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...

 class AliasCreatedList(_message.Message):
 __slots__ = ("events",)
@@ -57,27 +51,21 @@ class AliasCreatedList(_message.Message):
 events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
 def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...

-class UserUnlinked(_message.Message):
-__slots__ = ()
-def __init__(self) -> None: ...

 class EventContent(_message.Message):
-__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list", "user_unlinked")
+__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
 USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
 USER_DELETED_FIELD_NUMBER: _ClassVar[int]
 ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
 ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
 ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
 ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
-USER_UNLINKED_FIELD_NUMBER: _ClassVar[int]
 user_plan_change: UserPlanChanged
 user_deleted: UserDeleted
 alias_created: AliasCreated
 alias_status_change: AliasStatusChanged
 alias_deleted: AliasDeleted
 alias_create_list: AliasCreatedList
-user_unlinked: UserUnlinked
-def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ..., user_unlinked: _Optional[_Union[UserUnlinked, _Mapping]] = ...) -> None: ...
+def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...

 class Event(_message.Message):
 __slots__ = ("user_id", "external_user_id", "partner_id", "content")
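The stub changes above boil down to a field rename on the alias events: the left-hand ("main") column uses id/email/note plus a created_at timestamp, while the right-hand ("4.49.8") column uses the alias_-prefixed names and carries no timestamp. A minimal sketch of what building an AliasCreated message looks like on each side; the helper names are illustrative, the import path is the one used elsewhere in this diff, and only one of the two constructors can match any given build of the stubs:

# Sketch only: the two AliasCreated shapes shown in the stubs above.
from app.events.generated import event_pb2  # import path taken from this diff

def alias_created_main(alias) -> "event_pb2.AliasCreated":
    # main-side field names, including the created_at timestamp
    return event_pb2.AliasCreated(
        id=alias.id,
        email=alias.email,
        note=alias.note,
        enabled=alias.enabled,
        created_at=int(alias.created_at.timestamp),  # expression mirrored from the diff
    )

def alias_created_449(alias) -> "event_pb2.AliasCreated":
    # 4.49.8-side field names, no timestamp
    return event_pb2.AliasCreated(
        alias_id=alias.id,
        alias_email=alias.email,
        alias_note=alias.note,
        enabled=alias.enabled,
    )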
@@ -33,11 +33,8 @@ from app.models import (
 SLDomain,
 Hibp,
 AliasHibp,
-PartnerUser,
-PartnerSubscription,
 )
 from app.pgp_utils import load_public_key
-from app.proton.proton_partner import get_proton_partner


 def fake_data():
@@ -90,7 +87,7 @@ def fake_data():
 user_id=user.id,
 alias_id=alias.id,
 website_email="hey@google.com",
-reply_email="rep@sl.lan",
+reply_email="rep@sl.local",
 commit=True,
 )
 EmailLog.create(
@@ -166,7 +163,7 @@ def fake_data():
 # user_id=user.id,
 # alias_id=a.id,
 # website_email=f"contact{i}@example.com",
-# reply_email=f"rep{i}@sl.lan",
+# reply_email=f"rep{i}@sl.local",
 # )
 # Session.commit()
 # for _ in range(3):
@@ -272,27 +269,3 @@ def fake_data():
 CustomDomain.create(
 user_id=user.id, domain="old.com", verified=True, ownership_verified=True
 )

-# Create a user
-proton_partner = get_proton_partner()
-user = User.create(
-email="test@proton.me",
-name="Proton test",
-password="password",
-activated=True,
-is_admin=False,
-intro_shown=True,
-from_partner=True,
-flush=True,
-)
-pu = PartnerUser.create(
-user_id=user.id,
-partner_id=proton_partner.id,
-partner_email="test@proton.me",
-external_user_id="DUMMY",
-flush=True,
-)
-PartnerSubscription.create(
-partner_user_id=pu.id, end_at=arrow.now().shift(years=1, days=1)
-)
-Session.commit()
@@ -2,8 +2,8 @@ import urllib
 from email.header import Header
 from email.message import Message

-from app import config
 from app.email import headers
+from app import config
 from app.email_utils import add_or_replace_header, delete_header
 from app.handler.unsubscribe_encoder import (
 UnsubscribeEncoder,
@@ -46,11 +46,7 @@ class UnsubscribeGenerator:
 if start == -1 or end == -1 or start >= end:
 continue
 method = raw_method[start + 1 : end]
-try:
 url_data = urllib.parse.urlparse(method)
-except ValueError:
-LOG.debug(f"Unsub has invalid method {method}. Ignoring.")
-continue
 if url_data.scheme == "mailto":
 if url_data.path == config.UNSUBSCRIBER:
 LOG.debug(
@@ -103,9 +103,7 @@ class UnsubscribeHandler:
 ):
 return status.E509
 LOG.i(f"User disabled alias {alias} via unsubscribe header")
-alias_utils.change_alias_status(
-alias, enabled=False, message="Set enabled=False via unsubscribe header"
-)
+alias_utils.change_alias_status(alias, enabled=False)
 Session.commit()
 enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
 for mailbox in alias.mailboxes:
@@ -22,11 +22,10 @@ def send_alias_creation_events_for_user(
 ):
 event_list.append(
 AliasCreated(
-id=alias.id,
-email=alias.email,
-note=alias.note,
+alias_id=alias.id,
+alias_email=alias.email,
+alias_note=alias.note,
 enabled=alias.enabled,
-created_at=int(alias.created_at.timestamp),
 )
 )
 if len(event_list) >= chunk_size:
@@ -12,7 +12,6 @@ import arrow
 import sqlalchemy

 from app import config
-from app.constants import JobType
 from app.db import Session
 from app.email import headers
 from app.email_utils import (
@@ -175,7 +174,7 @@ class ExportUserDataJob:
 jobs_in_db = (
 Session.query(Job)
 .filter(
-Job.name == JobType.SEND_USER_REPORT.value,
+Job.name == config.JOB_SEND_USER_REPORT,
 Job.payload.op("->")("user_id").cast(sqlalchemy.TEXT)
 == str(self._user.id),
 Job.taken.is_(False),
@@ -185,7 +184,7 @@ class ExportUserDataJob:
 if jobs_in_db > 0:
 return None
 return Job.create(
-name=JobType.SEND_USER_REPORT.value,
+name=config.JOB_SEND_USER_REPORT,
 payload={"user_id": self._user.id},
 run_at=arrow.now(),
 commit=True,
@@ -1,72 +0,0 @@
-from __future__ import annotations
-
-import base64
-from typing import Optional
-
-import arrow
-
-from app.constants import JobType
-from app.errors import ProtonPartnerNotSetUp
-from app.events.generated import event_pb2
-from app.events.generated.event_pb2 import EventContent
-from app.models import (
-User,
-Job,
-PartnerUser,
-)
-from app.proton.proton_partner import get_proton_partner
-from events.event_sink import EventSink
-
-
-class SendEventToWebhookJob:
-def __init__(self, user: User, event: EventContent):
-self._user: User = user
-self._event: EventContent = event
-
-def run(self, sink: EventSink) -> bool:
-# Check if the current user has a partner_id
-try:
-proton_partner_id = get_proton_partner().id
-except ProtonPartnerNotSetUp:
-return False
-
-# It has. Retrieve the information for the PartnerUser
-partner_user = PartnerUser.get_by(
-user_id=self._user.id, partner_id=proton_partner_id
-)
-if partner_user is None:
-return True
-event = event_pb2.Event(
-user_id=self._user.id,
-external_user_id=partner_user.external_user_id,
-partner_id=partner_user.partner_id,
-content=self._event,
-)
-
-serialized = event.SerializeToString()
-return sink.send_data_to_webhook(serialized)
-
-@staticmethod
-def create_from_job(job: Job) -> Optional[SendEventToWebhookJob]:
-user = User.get(job.payload["user_id"])
-if not user:
-return None
-event_data = base64.b64decode(job.payload["event"])
-event = event_pb2.EventContent()
-event.ParseFromString(event_data)
-
-return SendEventToWebhookJob(user=user, event=event)
-
-def store_job_in_db(
-self, run_at: Optional[arrow.Arrow], commit: bool = True
-) -> Job:
-stub = self._event.SerializeToString()
-return Job.create(
-name=JobType.SEND_EVENT_TO_WEBHOOK.value,
-payload={
-"user_id": self._user.id,
-"event": base64.b64encode(stub).decode("utf-8"),
-},
-run_at=run_at if run_at is not None else arrow.now(),
-commit=commit,
-)
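The file removed in this hunk (SendEventToWebhookJob, present only on main) round-trips the protobuf payload through the Job table as base64. A condensed sketch of that round-trip, using only the calls visible in the removed code; the helper names here are illustrative, while the payload keys and protobuf methods are taken from the diff:

import base64
from app.events.generated import event_pb2  # import path taken from the removed file

def encode_event_payload(user_id: int, event: "event_pb2.EventContent") -> dict:
    # What store_job_in_db() above writes into Job.payload
    return {
        "user_id": user_id,
        "event": base64.b64encode(event.SerializeToString()).decode("utf-8"),
    }

def decode_event_payload(payload: dict) -> "event_pb2.EventContent":
    # What create_from_job() above reads back out of Job.payload
    event = event_pb2.EventContent()
    event.ParseFromString(base64.b64decode(payload["event"]))
    return event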
@@ -10,7 +10,7 @@ from app.config import (

 # this format allows clickable link to code source in PyCharm
 _log_format = (
-"%(asctime)s - %(name)s - %(levelname)s - %(process)d - %(request_id)s"
+"%(asctime)s - %(name)s - %(levelname)s - %(process)d - "
 '"%(pathname)s:%(lineno)d" - %(funcName)s() - %(message_id)s - %(message)s'
 )
 _log_formatter = logging.Formatter(_log_format)
@@ -37,21 +37,6 @@ class EmailHandlerFilter(logging.Filter):
 return _MESSAGE_ID


-class RequestIdFilter(logging.Filter):
-"""automatically add request-id to keep track of a request"""
-
-def filter(self, record):
-from flask import g, has_request_context
-
-request_id = ""
-if has_request_context() and hasattr(g, "request_id"):
-ctx_request_id = getattr(g, "request_id")
-if ctx_request_id:
-request_id = f"{ctx_request_id} - "
-record.request_id = request_id
-return True
-
-
 def _get_console_handler():
 console_handler = logging.StreamHandler(sys.stdout)
 console_handler.setFormatter(_log_formatter)
@@ -69,7 +54,6 @@ def _get_logger(name) -> logging.Logger:
 logger.addHandler(_get_console_handler())

 logger.addFilter(EmailHandlerFilter())
-logger.addFilter(RequestIdFilter())

 # no propagation to avoid propagating to root logger
 logger.propagate = False
@@ -1,26 +1,21 @@
 import dataclasses
 import secrets
-from enum import Enum
+import random
 from typing import Optional

 import arrow
-from sqlalchemy.exc import IntegrityError

 from app import config
-from app.constants import JobType
+from app.config import JOB_DELETE_MAILBOX
 from app.db import Session
 from app.email_utils import (
 mailbox_already_used,
 email_can_be_used_as_mailbox,
 send_email,
 render,
-get_email_domain_part,
 )
 from app.email_validation import is_valid_email
 from app.log import LOG
-from app.models import User, Mailbox, Job, MailboxActivation, Alias
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-from app.utils import canonicalize_email, sanitize_email
+from app.models import User, Mailbox, Job, MailboxActivation


 @dataclasses.dataclass
@@ -40,9 +35,8 @@ class OnlyPaidError(MailboxError):


 class CannotVerifyError(MailboxError):
-def __init__(self, msg: str, deleted_activation_code: bool = False):
+def __init__(self, msg: str):
 self.msg = msg
-self.deleted_activation_code = deleted_activation_code


 MAX_ACTIVATION_TRIES = 3
@@ -56,21 +50,28 @@ def create_mailbox(
 use_digit_codes: bool = False,
 send_link: bool = True,
 ) -> CreateMailboxOutput:
-email = sanitize_email(email)
 if not user.is_premium():
 LOG.i(
 f"User {user} has tried to create mailbox with {email} but is not premium"
 )
 raise OnlyPaidError()
-check_email_for_mailbox(email, user)
-new_mailbox: Mailbox = Mailbox.create(
-email=email, user_id=user.id, verified=verified, commit=True
+if not is_valid_email(email):
+LOG.i(
+f"User {user} has tried to create mailbox with {email} but is not valid email"
 )
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.CreateMailbox,
-message=f"Create mailbox {new_mailbox.id} ({new_mailbox.email}). Verified={verified}",
-commit=True,
+raise MailboxError("Invalid email")
+elif mailbox_already_used(email, user):
+LOG.i(
+f"User {user} has tried to create mailbox with {email} but email is already used"
+)
+raise MailboxError("Email already used")
+elif not email_can_be_used_as_mailbox(email):
+LOG.i(
+f"User {user} has tried to create mailbox with {email} but email is invalid"
+)
+raise MailboxError("Invalid email")
+new_mailbox = Mailbox.create(
+email=email, user_id=user.id, verified=verified, commit=True
 )

 if verified:
@@ -94,29 +95,8 @@ def create_mailbox(
 return output


-def check_email_for_mailbox(email, user):
-if not is_valid_email(email):
-LOG.i(
-f"User {user} has tried to create mailbox with {email} but is not valid email"
-)
-raise MailboxError("Invalid email")
-elif mailbox_already_used(email, user):
-LOG.i(
-f"User {user} has tried to create mailbox with {email} but email is already used"
-)
-raise MailboxError("Email already used")
-elif not email_can_be_used_as_mailbox(email):
-LOG.i(
-f"User {user} has tried to create mailbox with {email} but email is invalid"
-)
-raise MailboxError("Invalid email")


 def delete_mailbox(
-user: User,
-mailbox_id: int,
-transfer_mailbox_id: Optional[int],
-send_mail: bool = True,
+user: User, mailbox_id: int, transfer_mailbox_id: Optional[int]
 ) -> Mailbox:
 mailbox = Mailbox.get(mailbox_id)

@@ -149,20 +129,19 @@ def delete_mailbox(

 if not transfer_mailbox.verified:
 LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
-raise MailboxError("Your new mailbox is not verified")
+MailboxError("Your new mailbox is not verified")

 # Schedule delete account job
 LOG.i(
 f"User {user} has scheduled delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
 )
 Job.create(
-name=JobType.DELETE_MAILBOX.value,
+name=JOB_DELETE_MAILBOX,
 payload={
 "mailbox_id": mailbox.id,
 "transfer_mailbox_id": transfer_mailbox_id
 if transfer_mailbox_id and transfer_mailbox_id > 0
 else None,
-"send_mail": send_mail,
 },
 run_at=arrow.now(),
 commit=True,
@@ -184,17 +163,17 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
 f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
 )
 raise MailboxError("Invalid mailbox")
-if mailbox.user_id != user.id:
-LOG.i(
-f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
-)
-raise MailboxError("Invalid mailbox")
-if mailbox.verified and not mailbox.new_email:
+if mailbox.verified:
 LOG.i(
 f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
 )
 clear_activation_codes_for_mailbox(mailbox)
 return mailbox
+if mailbox.user_id != user.id:
+LOG.i(
+f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
+)
+raise MailboxError("Invalid mailbox")

 activation = (
 MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
@@ -209,10 +188,7 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
 if activation.tries >= MAX_ACTIVATION_TRIES:
 LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
 clear_activation_codes_for_mailbox(mailbox)
-raise CannotVerifyError(
-"Invalid activation code. Please request another code.",
-deleted_activation_code=True,
-)
+raise CannotVerifyError("Invalid activation code. Please request another code.")
 if activation.created_at < arrow.now().shift(minutes=-15):
 LOG.i(
 f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
@@ -226,34 +202,8 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
 activation.tries = activation.tries + 1
 Session.commit()
 raise CannotVerifyError("Invalid activation code")
-if mailbox.new_email:
-LOG.i(
-f"User {user} has verified mailbox email change from {mailbox.email} to {mailbox.new_email}"
-)
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.UpdateMailbox,
-message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
-)
-mailbox.email = mailbox.new_email
-mailbox.new_email = None
-mailbox.verified = True
-elif not mailbox.verified:
 LOG.i(f"User {user} has verified mailbox {mailbox_id}")
 mailbox.verified = True
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.VerifyMailbox,
-message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
-)
-if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
-raise MailboxError("That address is already in use")
-
-else:
-LOG.i(
-"User {user} alread has mailbox {mailbox} verified and no pending email change"
-)

 clear_activation_codes_for_mailbox(mailbox)
 return mailbox

@@ -263,10 +213,7 @@ def generate_activation_code(
 ) -> MailboxActivation:
 clear_activation_codes_for_mailbox(mailbox)
 if use_digit_code:
-if config.MAILBOX_VERIFICATION_OVERRIDE_CODE:
-code = config.MAILBOX_VERIFICATION_OVERRIDE_CODE
-else:
-code = "{:06d}".format(secrets.randbelow(1000000))[:6]
+code = "{:06d}".format(random.randint(1, 999999))
 else:
 code = secrets.token_urlsafe(16)
 return MailboxActivation.create(
@@ -278,10 +225,7 @@ def generate_activation_code(


 def send_verification_email(
-user: User,
-mailbox: Mailbox,
-activation: MailboxActivation,
-send_link: bool = True,
+user: User, mailbox: Mailbox, activation: MailboxActivation, send_link: bool = True
 ):
 LOG.i(
 f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
@@ -314,197 +258,3 @@ def send_verification_email(
 mailbox_email=mailbox.email,
 ),
 )


-def send_change_email(user: User, mailbox: Mailbox, activation: MailboxActivation):
-verification_url = f"{config.URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox.id}&code={activation.code}"
-
-send_email(
-mailbox.new_email,
-"Confirm mailbox change on SimpleLogin",
-render(
-"transactional/verify-mailbox-change.txt.jinja2",
-user=user,
-link=verification_url,
-mailbox_email=mailbox.email,
-mailbox_new_email=mailbox.new_email,
-),
-render(
-"transactional/verify-mailbox-change.html",
-user=user,
-link=verification_url,
-mailbox_email=mailbox.email,
-mailbox_new_email=mailbox.new_email,
-),
-)
-
-
-def request_mailbox_email_change(
-user: User,
-mailbox: Mailbox,
-new_email: str,
-email_ownership_verified: bool = False,
-send_email: bool = True,
-use_digit_codes: bool = False,
-) -> CreateMailboxOutput:
-new_email = sanitize_email(new_email)
-if new_email == mailbox.email:
-raise MailboxError("Same email")
-check_email_for_mailbox(new_email, user)
-if email_ownership_verified:
-mailbox.email = new_email
-mailbox.new_email = None
-mailbox.verified = True
-else:
-mailbox.new_email = new_email
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.UpdateMailbox,
-message=f"Updated mailbox {mailbox.id} email ({new_email}) pre-verified({email_ownership_verified}",
-)
-try:
-Session.commit()
-except IntegrityError:
-LOG.i(f"This email {new_email} is already pending for some mailbox")
-Session.rollback()
-raise MailboxError("Email already in use")
-
-if email_ownership_verified:
-LOG.i(f"User {user} as created a pre-verified mailbox with {new_email}")
-return CreateMailboxOutput(mailbox=mailbox, activation=None)
-
-LOG.i(f"User {user} has updated mailbox email with {new_email}")
-activation = generate_activation_code(mailbox, use_digit_code=use_digit_codes)
-output = CreateMailboxOutput(mailbox=mailbox, activation=activation)
-
-if not send_email:
-LOG.i(f"Skipping sending validation email for mailbox {mailbox}")
-return output
-
-send_change_email(
-user,
-mailbox,
-activation=activation,
-)
-return output
-
-
-class MailboxEmailChangeError(Enum):
-InvalidId = 1
-EmailAlreadyUsed = 2
-
-
-@dataclasses.dataclass
-class MailboxEmailChangeResult:
-error: Optional[MailboxEmailChangeError]
-message: str
-message_category: str
-
-
-def perform_mailbox_email_change(mailbox_id: int) -> MailboxEmailChangeResult:
-mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
-
-# new_email can be None if user cancels change in the meantime
-if mailbox and mailbox.new_email:
-user = mailbox.user
-if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
-return MailboxEmailChangeResult(
-error=MailboxEmailChangeError.EmailAlreadyUsed,
-message=f"{mailbox.new_email} is already used",
-message_category="error",
-)
-
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.UpdateMailbox,
-message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
-)
-mailbox.email = mailbox.new_email
-mailbox.new_email = None
-
-# mark mailbox as verified if the change request is sent from an unverified mailbox
-mailbox.verified = True
-Session.commit()
-
-LOG.d("Mailbox change %s is verified", mailbox)
-return MailboxEmailChangeResult(
-error=None,
-message=f"The {mailbox.email} is updated",
-message_category="success",
-)
-else:
-return MailboxEmailChangeResult(
-error=MailboxEmailChangeError.InvalidId,
-message="Invalid link",
-message_category="error",
-)
-
-
-def cancel_email_change(mailbox_id: int, user: User):
-mailbox = Mailbox.get(mailbox_id)
-if not mailbox:
-LOG.i(
-f"User {user} has tried to cancel a mailbox an unknown mailbox {mailbox_id}"
-)
-raise MailboxError("Invalid mailbox")
-if mailbox.user.id != user.id:
-LOG.i(
-f"User {user} has tried to cancel a mailbox {mailbox} owned by another user"
-)
-raise MailboxError("Invalid mailbox")
-mailbox.new_email = None
-LOG.i(f"User {mailbox.user} has cancelled mailbox email change")
-clear_activation_codes_for_mailbox(mailbox)
-
-
-def __get_alias_mailbox_from_email(
-email_address: str, alias: Alias
-) -> Optional[Mailbox]:
-for mailbox in alias.mailboxes:
-if mailbox.email == email_address:
-return mailbox
-
-for authorized_address in mailbox.authorized_addresses:
-if authorized_address.email == email_address:
-LOG.d(
-"Found an authorized address for %s %s %s",
-alias,
-mailbox,
-authorized_address,
-)
-return mailbox
-return None
-
-
-def __get_alias_mailbox_from_email_or_canonical_email(
-email_address: str, alias: Alias
-) -> Optional[Mailbox]:
-# We need to first check for the uncanonicalized version because we still have users in the db with the
-# email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
-mbox = __get_alias_mailbox_from_email(email_address, alias)
-if mbox is not None:
-return mbox
-canonical_email = canonicalize_email(email_address)
-if canonical_email != email_address:
-return __get_alias_mailbox_from_email(canonical_email, alias)
-return None
-
-
-def get_mailbox_for_reply_phase(
-envelope_mail_from: str, header_mail_from: str, alias
-) -> Optional[Mailbox]:
-"""return the corresponding mailbox given the mail_from and alias
-Usually the mail_from=mailbox.email but it can also be one of the authorized address
-"""
-mbox = __get_alias_mailbox_from_email_or_canonical_email(envelope_mail_from, alias)
-if mbox is not None:
-return mbox
-if not header_mail_from:
-return None
-envelope_from_domain = get_email_domain_part(envelope_mail_from)
-header_from_domain = get_email_domain_part(header_mail_from)
-if envelope_from_domain != header_from_domain:
-return None
-# For services that use VERP sending (envelope from has encoded data to account for bounces)
-# if the domain is the same in the header from as the envelope from we can use the header from
-return __get_alias_mailbox_from_email_or_canonical_email(header_mail_from, alias)
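One behavioural difference worth noting in the mailbox_utils hunks above: main draws digit activation codes from the secrets module (and honours a MAILBOX_VERIFICATION_OVERRIDE_CODE config value), while 4.49.8 uses the non-cryptographic random module. A small sketch of just that branch; the function names are illustrative and the override value is treated as an optional setting, as in the diff:

import random
import secrets
from typing import Optional

def digit_code_main(override_code: Optional[str]) -> str:
    # main: config override wins, otherwise a secrets-backed 6-digit code
    if override_code:
        return override_code
    return "{:06d}".format(secrets.randbelow(1000000))[:6]

def digit_code_449() -> str:
    # 4.49.8: PRNG-backed code; randint(1, 999999) never yields "000000"
    return "{:06d}".format(random.randint(1, 999999))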
@@ -24,15 +24,14 @@ from sqlalchemy import text, desc, CheckConstraint, Index, Column
 from sqlalchemy.dialects.postgresql import TSVECTOR
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import deferred
-from sqlalchemy.orm.exc import ObjectDeletedError
 from sqlalchemy.sql import and_
 from sqlalchemy_utils import ArrowType

 from app import config, rate_limiter
 from app import s3
-from app.constants import JobType
 from app.db import Session
 from app.dns_utils import get_mx_domains

 from app.errors import (
 AliasInTrashError,
 DirectoryInTrashError,
@@ -158,8 +157,6 @@ class File(Base, ModelMixin):
 path = sa.Column(sa.String(128), unique=True, nullable=False)
 user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)

-__table_args__ = (sa.Index("ix_file_user_id", "user_id"),)

 def get_url(self, expires_in=3600):
 return s3.get_url(self.path, expires_in)

@@ -239,7 +236,6 @@ class AuditLogActionEnum(EnumE):
 disable_user = 9
 enable_user = 10
 stop_trial = 11
-unlink_user = 12


 class Phase(EnumE):
@@ -276,12 +272,6 @@ class AliasDeleteReason(EnumE):
 CustomDomainDeleted = 5


-class JobPriority(EnumE):
-Low = 1
-Default = 50
-High = 100
-
-
 class IntEnumType(sa.types.TypeDecorator):
 impl = sa.Integer

@@ -328,8 +318,6 @@ class HibpNotifiedAlias(Base, ModelMixin):

 notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)

-__table_args__ = (sa.Index("ix_hibp_notified_alias_user_id", "user_id"),)
-

 class Fido(Base, ModelMixin):
 __tablename__ = "fido"
@@ -344,13 +332,11 @@ class Fido(Base, ModelMixin):
 name = sa.Column(sa.String(128), nullable=False, unique=False)
 user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)

-__table_args__ = (sa.Index("ix_fido_user_id", "user_id"),)
-

 class User(Base, ModelMixin, UserMixin, PasswordOracle):
 __tablename__ = "users"

-FLAG_FREE_DISABLE_CREATE_CONTACTS = 1 << 0
+FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
 FLAG_CREATED_FROM_PARTNER = 1 << 1
 FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
 FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
@@ -369,7 +355,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 sa.Boolean, default=True, nullable=False, server_default="1"
 )

-activated = sa.Column(sa.Boolean, default=False, nullable=False)
+activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)

 # an account can be disabled if having harmful behavior
 disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@@ -557,7 +543,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 # bitwise flags. Allow for future expansion
 flags = sa.Column(
 sa.BigInteger,
-default=FLAG_FREE_DISABLE_CREATE_CONTACTS,
+default=FLAG_FREE_DISABLE_CREATE_ALIAS,
 server_default="0",
 nullable=False,
 )
@@ -578,17 +564,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
 ),
 sa.Index("ix_users_delete_on", delete_on),
-sa.Index("ix_users_default_mailbox_id", default_mailbox_id),
-sa.Index(
-"ix_users_default_alias_custom_domain_id", default_alias_custom_domain_id
-),
-sa.Index("ix_users_profile_picture_id", profile_picture_id),
-sa.Index(
-"idx_users_email_trgm",
-"email",
-postgresql_ops={"email": "gin_trgm_ops"},
-postgresql_using="gin",
-),
 )

 @property
@@ -641,23 +616,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 if "alternative_id" not in kwargs:
 user.alternative_id = str(uuid.uuid4())

-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-
-trail = ". Created from partner" if from_partner else ""
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.CreateUser,
-message=f"Created user {email}{trail}",
-)
-
 # If the user is created from partner, do not notify
 # nor give a trial
 if from_partner:
-user.flags = user.flags | User.FLAG_CREATED_FROM_PARTNER
+user.flags = User.FLAG_CREATED_FROM_PARTNER
 user.notification = False
 user.trial_end = None
 Job.create(
-name=JobType.SEND_PROTON_WELCOME_1.value,
+name=config.JOB_SEND_PROTON_WELCOME_1,
 payload={"user_id": user.id},
 run_at=arrow.now(),
 )
@@ -683,17 +649,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

 # Schedule onboarding emails
 Job.create(
-name=JobType.ONBOARDING_1.value,
+name=config.JOB_ONBOARDING_1,
 payload={"user_id": user.id},
 run_at=arrow.now().shift(days=1),
 )
 Job.create(
-name=JobType.ONBOARDING_2.value,
+name=config.JOB_ONBOARDING_2,
 payload={"user_id": user.id},
 run_at=arrow.now().shift(days=2),
 )
 Job.create(
-name=JobType.ONBOARDING_4.value,
+name=config.JOB_ONBOARDING_4,
 payload={"user_id": user.id},
 run_at=arrow.now().shift(days=3),
 )
@@ -1007,7 +973,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 def has_custom_domain(self):
 return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0

-def custom_domains(self) -> List["CustomDomain"]:
+def custom_domains(self):
 return CustomDomain.filter_by(user_id=self.id, verified=True).all()

 def available_domains_for_random_alias(
@@ -1202,7 +1168,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 def can_create_contacts(self) -> bool:
 if self.is_premium():
 return True
-if self.flags & User.FLAG_FREE_DISABLE_CREATE_CONTACTS == 0:
+if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
 return True
 return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS

@@ -1245,8 +1211,6 @@ class ActivationCode(Base, ModelMixin):

 expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)

-__table_args__ = (sa.Index("ix_activation_code_user_id", "user_id"),)
-
 def is_expired(self):
 return self.expired < arrow.now()

@@ -1263,8 +1227,6 @@ class ResetPasswordCode(Base, ModelMixin):

 expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)

-__table_args__ = (sa.Index("ix_reset_password_code_user_id", "user_id"),)
-
 def is_expired(self):
 return self.expired < arrow.now()

@@ -1307,8 +1269,6 @@ class MfaBrowser(Base, ModelMixin):

 user = orm.relationship(User)

-__table_args__ = (sa.Index("ix_mfa_browser_user_id", "user_id"),)
-
 @classmethod
 def create_new(cls, user, token_length=64) -> "MfaBrowser":
 found = False
@@ -1367,12 +1327,6 @@ class Client(Base, ModelMixin):
 user = orm.relationship(User)
 referral = orm.relationship("Referral")

-__table_args__ = (
-sa.Index("ix_client_user_id", "user_id"),
-sa.Index("ix_client_icon_id", "icon_id"),
-sa.Index("ix_client_referral_id", "referral_id"),
-)
-
 def nb_user(self):
 return ClientUser.filter_by(client_id=self.id).count()

@@ -1421,8 +1375,6 @@ class RedirectUri(Base, ModelMixin):

 client = orm.relationship(Client, backref="redirect_uris")

-__table_args__ = (sa.Index("ix_redirect_uri_client_id", "client_id"),)
-

 class AuthorizationCode(Base, ModelMixin):
 __tablename__ = "authorization_code"
@@ -1444,11 +1396,6 @@ class AuthorizationCode(Base, ModelMixin):

 expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m)

-__table_args__ = (
-sa.Index("ix_authorization_code_client_id", "client_id"),
-sa.Index("ix_authorization_code_user_id", "user_id"),
-)
-
 def is_expired(self):
 return self.expired < arrow.now()

@@ -1471,11 +1418,6 @@ class OauthToken(Base, ModelMixin):

 expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)

-__table_args__ = (
-sa.Index("ix_oauth_token_user_id", "user_id"),
-sa.Index("ix_oauth_token_client_id", "client_id"),
-)
-
 def is_expired(self):
 return self.expired < arrow.now()

@@ -1629,7 +1571,6 @@ class Alias(Base, ModelMixin):
 postgresql_ops={"note": "gin_trgm_ops"},
 postgresql_using="gin",
 ),
-Index("ix_alias_original_owner_id", "original_owner_id"),
 )

 user = orm.relationship(User, foreign_keys=[user_id])
@@ -1672,7 +1613,7 @@ class Alias(Base, ModelMixin):
 return False

 @staticmethod
-def get_custom_domain(alias_address: str) -> Optional["CustomDomain"]:
+def get_custom_domain(alias_address) -> Optional["CustomDomain"]:
 alias_domain = validate_email(
 alias_address, check_deliverability=False, allow_smtputf8=False
 ).domain
@@ -1715,15 +1656,22 @@ class Alias(Base, ModelMixin):
 custom_domain = Alias.get_custom_domain(email)
 if custom_domain:
 new_alias.custom_domain_id = custom_domain.id
-else:
-custom_domain = CustomDomain.get(kw["custom_domain_id"])
-# If it comes from a custom domain created from partner. Mark it as created from partner
-if custom_domain is not None and custom_domain.partner_id is not None:
-new_alias.flags = (new_alias.flags or 0) | Alias.FLAG_PARTNER_CREATED

 Session.add(new_alias)
 DailyMetric.get_or_create_today_metric().nb_alias += 1

+# Internal import to avoid global import cycles
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import AliasCreated, EventContent
+
+event = AliasCreated(
+alias_id=new_alias.id,
+alias_email=new_alias.email,
+alias_note=new_alias.note,
+enabled=True,
+)
+EventDispatcher.send_event(user, EventContent(alias_created=event))
+
 if (
 new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
 and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
@@ -1736,23 +1684,6 @@ class Alias(Base, ModelMixin):
 if flush:
 Session.flush()

-# Internal import to avoid global import cycles
-from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import AliasCreated, EventContent
-
-event = AliasCreated(
-id=new_alias.id,
-email=new_alias.email,
-note=new_alias.note,
-enabled=True,
-created_at=int(new_alias.created_at.timestamp),
-)
-EventDispatcher.send_event(user, EventContent(alias_created=event))
-emit_alias_audit_log(
-new_alias, AliasAuditLogAction.CreateAlias, "New alias created"
-)
-
 return new_alias

 @classmethod
@@ -1931,22 +1862,17 @@ class Contact(Base, ModelMixin):

 MAX_NAME_LENGTH = 512

-FLAG_PARTNER_CREATED = 1 << 0
-
 __tablename__ = "contact"

 __table_args__ = (
 sa.UniqueConstraint("alias_id", "website_email", name="uq_contact"),
-sa.Index("ix_contact_user_id_id", "user_id", "id"),
 )

 user_id = sa.Column(
-sa.ForeignKey(User.id, ondelete="cascade"),
-nullable=False,
+sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
 )
 alias_id = sa.Column(
-sa.ForeignKey(Alias.id, ondelete="cascade"),
-nullable=False,
+sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
 )

 name = sa.Column(
@@ -1994,9 +1920,6 @@ class Contact(Base, ModelMixin):
 # whether contact is created automatically during the forward phase
 automatic_created = sa.Column(sa.Boolean, nullable=True, default=False)

-# contact flags
-flags = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
-
 @property
 def email(self):
 return self.website_email
@@ -2126,15 +2049,11 @@ class Contact(Base, ModelMixin):

 class EmailLog(Base, ModelMixin):
 __tablename__ = "email_log"
-__table_args__ = (
-Index("ix_email_log_created_at", "created_at"),
-Index("ix_email_log_mailbox_id", "mailbox_id"),
-Index("ix_email_log_bounced_mailbox_id", "bounced_mailbox_id"),
-Index("ix_email_log_refused_email_id", "refused_email_id"),
-Index("ix_email_log_user_id_email_log_id", "user_id", "id"),
-)
+__table_args__ = (Index("ix_email_log_created_at", "created_at"),)

-user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
+user_id = sa.Column(
+sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
+)
 contact_id = sa.Column(
 sa.ForeignKey(Contact.id, ondelete="cascade"), nullable=False, index=True
 )
@@ -2406,12 +2325,10 @@ class AliasUsedOn(Base, ModelMixin):

 __table_args__ = (
 sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
-sa.Index("ix_alias_used_on_user_id", "user_id"),
 )

 alias_id = sa.Column(
-sa.ForeignKey(Alias.id, ondelete="cascade"),
-nullable=False,
+sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
 )
 user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)

@@ -2434,8 +2351,6 @@ class ApiKey(Base, ModelMixin):

 user = orm.relationship(User)

-__table_args__ = (sa.Index("ix_api_key_user_id", "user_id"),)
-
 @classmethod
 def create(cls, user_id, name=None, **kwargs):
 code = random_string(60)
@@ -2503,18 +2418,6 @@ class CustomDomain(Base, ModelMixin):
 sa.Boolean, nullable=False, default=False, server_default="0"
 )

-partner_id = sa.Column(
-sa.Integer,
-sa.ForeignKey("partner.id"),
-nullable=True,
-default=None,
-server_default=None,
-)
-
-pending_deletion = sa.Column(
-sa.Boolean, nullable=False, default=False, server_default="0"
-)
-
 __table_args__ = (
 Index(
 "ix_unique_domain",  # Index name
@@ -2522,8 +2425,6 @@ class CustomDomain(Base, ModelMixin):
 unique=True,
 postgresql_where=Column("ownership_verified"),
 ),  # The condition
-Index("ix_custom_domain_user_id", "user_id"),
-Index("ix_custom_domain_pending_deletion", "pending_deletion"),
 )

 user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
@@ -2541,6 +2442,9 @@ class CustomDomain(Base, ModelMixin):
 def get_trash_url(self):
 return config.URL + f"/dashboard/domains/{self.id}/trash"

+def get_ownership_dns_txt_value(self):
+return f"sl-verification={self.ownership_txt_token}"
+
 @classmethod
 def create(cls, **kwargs):
 domain = kwargs.get("domain")
@@ -2637,7 +2541,6 @@ class DomainDeletedAlias(Base, ModelMixin):

 __table_args__ = (
 sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"),
-sa.Index("ix_domain_deleted_alias_user_id", "user_id"),
 )

 email = sa.Column(sa.String(256), nullable=False)
@@ -2698,8 +2601,6 @@ class Coupon(Base, ModelMixin):
 # a coupon can have an expiration
 expires_date = sa.Column(ArrowType, nullable=True)

-__table_args__ = (sa.Index("ix_coupon_used_by_user_id", "used_by_user_id"),)
-

 class Directory(Base, ModelMixin):
 __tablename__ = "directory"
@@ -2714,8 +2615,6 @@ class Directory(Base, ModelMixin):
 "Mailbox", secondary="directory_mailbox", lazy="joined"
 )

-__table_args__ = (sa.Index("ix_directory_user_id", "user_id"),)
-
 @property
 def mailboxes(self):
 if self._mailboxes:
@@ -2776,19 +2675,12 @@ class Job(Base, ModelMixin):
 nullable=False,
 server_default=str(JobState.ready.value),
 default=JobState.ready.value,
+index=True,
 )
 attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0)
 taken_at = sa.Column(ArrowType, nullable=True)
-priority = sa.Column(
-IntEnumType(JobPriority),
-default=JobPriority.Default,
-server_default=str(JobPriority.Default.value),
-nullable=False,
-)

-__table_args__ = (
-Index("ix_state_run_at_taken_at_priority", state, run_at, taken_at, priority),
-)
+__table_args__ = (Index("ix_state_run_at_taken_at", state, run_at, taken_at),)

 def __repr__(self):
 return f"<Job {self.id} {self.name} {self.payload}>"
@@ -2796,7 +2688,9 @@ class Job(Base, ModelMixin):

 class Mailbox(Base, ModelMixin):
 __tablename__ = "mailbox"
-user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
+user_id = sa.Column(
+sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
+)
 email = sa.Column(sa.String(256), nullable=False, index=True)
 verified = sa.Column(sa.Boolean, default=False, nullable=False)
 force_spf = sa.Column(sa.Boolean, default=True, server_default="1", nullable=False)
@@ -2822,17 +2716,7 @@ class Mailbox(Base, ModelMixin):

 generic_subject = sa.Column(sa.String(78), nullable=True)

-__table_args__ = (
-sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),
-sa.Index("ix_mailbox_pgp_finger_print", "pgp_finger_print"),
-# index on email column using pg_trgm
-Index(
-"ix_mailbox_email_trgm_idx",
+__table_args__ = (sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),)
"email",
|
|
||||||
postgresql_ops={"email": "gin_trgm_ops"},
|
|
||||||
postgresql_using="gin",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
user = orm.relationship(User, foreign_keys=[user_id])
|
user = orm.relationship(User, foreign_keys=[user_id])
|
||||||
|
|
||||||
@ -2854,19 +2738,23 @@ class Mailbox(Base, ModelMixin):
|
|||||||
return len(alias_ids)
|
return len(alias_ids)
|
||||||
|
|
||||||
def is_proton(self) -> bool:
|
def is_proton(self) -> bool:
|
||||||
for proton_email_domain in config.PROTON_EMAIL_DOMAINS:
|
if (
|
||||||
if self.email.endswith(f"@{proton_email_domain}"):
|
self.email.endswith("@proton.me")
|
||||||
|
or self.email.endswith("@protonmail.com")
|
||||||
|
or self.email.endswith("@protonmail.ch")
|
||||||
|
or self.email.endswith("@proton.ch")
|
||||||
|
or self.email.endswith("@pm.me")
|
||||||
|
):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
from app.email_utils import get_email_local_part
|
from app.email_utils import get_email_local_part
|
||||||
|
|
||||||
mx_domains = get_mx_domains(get_email_local_part(self.email))
|
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
|
||||||
|
|
||||||
proton_mx_domains = config.PROTON_MX_SERVERS
|
|
||||||
# Proton is the first domain
|
# Proton is the first domain
|
||||||
for prio in mx_domains:
|
if mx_domains and mx_domains[0][1] in (
|
||||||
for mx_domain in mx_domains[prio]:
|
"mail.protonmail.ch.",
|
||||||
if mx_domain in proton_mx_domains:
|
"mailsec.protonmail.ch.",
|
||||||
|
):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
@ -2965,8 +2853,6 @@ class RefusedEmail(Base, ModelMixin):
|
|||||||
# toggle this when email content (stored at full_report_path & path are deleted)
|
# toggle this when email content (stored at full_report_path & path are deleted)
|
||||||
deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
|
||||||
__table_args__ = (sa.Index("ix_refused_email_user_id", "user_id"),)
|
|
||||||
|
|
||||||
def get_url(self, expires_in=3600):
|
def get_url(self, expires_in=3600):
|
||||||
if self.path:
|
if self.path:
|
||||||
return s3.get_url(self.path, expires_in)
|
return s3.get_url(self.path, expires_in)
|
||||||
@ -2989,8 +2875,6 @@ class Referral(Base, ModelMixin):
|
|||||||
|
|
||||||
user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")
|
user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")
|
||||||
|
|
||||||
__table_args__ = (sa.Index("ix_referral_user_id", "user_id"),)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def nb_user(self) -> int:
|
def nb_user(self) -> int:
|
||||||
return User.filter_by(referral_id=self.id, activated=True).count()
|
return User.filter_by(referral_id=self.id, activated=True).count()
|
||||||
@ -3030,12 +2914,6 @@ class SentAlert(Base, ModelMixin):
|
|||||||
to_email = sa.Column(sa.String(256), nullable=False)
|
to_email = sa.Column(sa.String(256), nullable=False)
|
||||||
alert_type = sa.Column(sa.String(256), nullable=False)
|
alert_type = sa.Column(sa.String(256), nullable=False)
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
sa.Index("ix_sent_alert_user_id", "user_id"),
|
|
||||||
sa.Index("ix_sent_alert_to_email", "to_email"),
|
|
||||||
sa.Index("ix_sent_alert_alert_type", "alert_type"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class AliasMailbox(Base, ModelMixin):
|
class AliasMailbox(Base, ModelMixin):
|
||||||
__tablename__ = "alias_mailbox"
|
__tablename__ = "alias_mailbox"
|
||||||
@ -3044,8 +2922,7 @@ class AliasMailbox(Base, ModelMixin):
|
|||||||
)
|
)
|
||||||
|
|
||||||
alias_id = sa.Column(
|
alias_id = sa.Column(
|
||||||
sa.ForeignKey(Alias.id, ondelete="cascade"),
|
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
|
||||||
nullable=False,
|
|
||||||
)
|
)
|
||||||
mailbox_id = sa.Column(
|
mailbox_id = sa.Column(
|
||||||
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
|
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
|
||||||
@ -3060,8 +2937,7 @@ class AliasHibp(Base, ModelMixin):
|
|||||||
__table_args__ = (sa.UniqueConstraint("alias_id", "hibp_id", name="uq_alias_hibp"),)
|
__table_args__ = (sa.UniqueConstraint("alias_id", "hibp_id", name="uq_alias_hibp"),)
|
||||||
|
|
||||||
alias_id = sa.Column(
|
alias_id = sa.Column(
|
||||||
sa.Integer(),
|
sa.Integer(), sa.ForeignKey("alias.id", ondelete="cascade"), index=True
|
||||||
sa.ForeignKey("alias.id", ondelete="cascade"),
|
|
||||||
)
|
)
|
||||||
hibp_id = sa.Column(
|
hibp_id = sa.Column(
|
||||||
sa.Integer(), sa.ForeignKey("hibp.id", ondelete="cascade"), index=True
|
sa.Integer(), sa.ForeignKey("hibp.id", ondelete="cascade"), index=True
|
||||||
@ -3283,11 +3159,6 @@ class BatchImport(Base, ModelMixin):
|
|||||||
file = orm.relationship(File)
|
file = orm.relationship(File)
|
||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
sa.Index("ix_batch_import_file_id", "file_id"),
|
|
||||||
sa.Index("ix_batch_import_user_id", "user_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def nb_alias(self):
|
def nb_alias(self):
|
||||||
return Alias.filter_by(batch_import_id=self.id).count()
|
return Alias.filter_by(batch_import_id=self.id).count()
|
||||||
|
|
||||||
@ -3308,7 +3179,6 @@ class AuthorizedAddress(Base, ModelMixin):
|
|||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
|
sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
|
||||||
sa.Index("ix_authorized_address_user_id", "user_id"),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
|
mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
|
||||||
@ -3450,8 +3320,6 @@ class Payout(Base, ModelMixin):
|
|||||||
|
|
||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
|
|
||||||
__table_args__ = (sa.Index("ix_payout_user_id", "user_id"),)
|
|
||||||
|
|
||||||
|
|
||||||
class IgnoredEmail(Base, ModelMixin):
|
class IgnoredEmail(Base, ModelMixin):
|
||||||
"""If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
|
"""If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
|
||||||
@ -3553,8 +3421,6 @@ class PhoneReservation(Base, ModelMixin):
|
|||||||
start = sa.Column(ArrowType, nullable=False)
|
start = sa.Column(ArrowType, nullable=False)
|
||||||
end = sa.Column(ArrowType, nullable=False)
|
end = sa.Column(ArrowType, nullable=False)
|
||||||
|
|
||||||
__table_args__ = (sa.Index("ix_phone_reservation_user_id", "user_id"),)
|
|
||||||
|
|
||||||
|
|
||||||
class PhoneMessage(Base, ModelMixin):
|
class PhoneMessage(Base, ModelMixin):
|
||||||
__tablename__ = "phone_message"
|
__tablename__ = "phone_message"
|
||||||
@ -3729,11 +3595,6 @@ class ProviderComplaint(Base, ModelMixin):
|
|||||||
user = orm.relationship(User, foreign_keys=[user_id])
|
user = orm.relationship(User, foreign_keys=[user_id])
|
||||||
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
sa.Index("ix_provider_complaint_user_id", "user_id"),
|
|
||||||
sa.Index("ix_provider_complaint_refused_email_id", "refused_email_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class PartnerApiToken(Base, ModelMixin):
|
class PartnerApiToken(Base, ModelMixin):
|
||||||
__tablename__ = "partner_api_token"
|
__tablename__ = "partner_api_token"
|
||||||
@ -3777,8 +3638,7 @@ class PartnerUser(Base, ModelMixin):
|
|||||||
index=True,
|
index=True,
|
||||||
)
|
)
|
||||||
partner_id = sa.Column(
|
partner_id = sa.Column(
|
||||||
sa.ForeignKey("partner.id", ondelete="cascade"),
|
sa.ForeignKey("partner.id", ondelete="cascade"), nullable=False, index=True
|
||||||
nullable=False,
|
|
||||||
)
|
)
|
||||||
external_user_id = sa.Column(sa.String(128), unique=False, nullable=False)
|
external_user_id = sa.Column(sa.String(128), unique=False, nullable=False)
|
||||||
partner_email = sa.Column(sa.String(255), unique=False, nullable=True)
|
partner_email = sa.Column(sa.String(255), unique=False, nullable=True)
|
||||||
@ -3805,8 +3665,7 @@ class PartnerSubscription(Base, ModelMixin):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# when the partner subscription ends
|
# when the partner subscription ends
|
||||||
end_at = sa.Column(ArrowType, nullable=True, index=True)
|
end_at = sa.Column(ArrowType, nullable=False, index=True)
|
||||||
lifetime = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
|
|
||||||
|
|
||||||
partner_user = orm.relationship(PartnerUser)
|
partner_user = orm.relationship(PartnerUser)
|
||||||
|
|
||||||
@ -3828,9 +3687,7 @@ class PartnerSubscription(Base, ModelMixin):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
def is_active(self):
|
def is_active(self):
|
||||||
return self.lifetime or self.end_at > arrow.now().shift(
|
return self.end_at > arrow.now().shift(days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS)
|
||||||
days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# endregion
|
# endregion
|
||||||
@ -3861,8 +3718,6 @@ class NewsletterUser(Base, ModelMixin):
|
|||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
newsletter = orm.relationship(Newsletter)
|
newsletter = orm.relationship(Newsletter)
|
||||||
|
|
||||||
__table_args__ = (sa.Index("ix_newsletter_user_user_id", "user_id"),)
|
|
||||||
|
|
||||||
|
|
||||||
class ApiToCookieToken(Base, ModelMixin):
|
class ApiToCookieToken(Base, ModelMixin):
|
||||||
__tablename__ = "api_cookie_token"
|
__tablename__ = "api_cookie_token"
|
||||||
@ -3873,11 +3728,6 @@ class ApiToCookieToken(Base, ModelMixin):
|
|||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
api_key = orm.relationship(ApiKey)
|
api_key = orm.relationship(ApiKey)
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
sa.Index("ix_api_to_cookie_token_api_key_id", "api_key_id"),
|
|
||||||
sa.Index("ix_api_to_cookie_token_user_id", "user_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create(cls, **kwargs):
|
def create(cls, **kwargs):
|
||||||
code = secrets.token_urlsafe(32)
|
code = secrets.token_urlsafe(32)
|
||||||
@ -3900,19 +3750,17 @@ class SyncEvent(Base, ModelMixin):
|
|||||||
sa.Index("ix_sync_event_taken_time", "taken_time"),
|
sa.Index("ix_sync_event_taken_time", "taken_time"),
|
||||||
)
|
)
|
||||||
|
|
||||||
def mark_as_taken(self, allow_taken_older_than: Optional[Arrow] = None) -> bool:
|
def mark_as_taken(self) -> bool:
|
||||||
try:
|
sql = """
|
||||||
taken_condition = ["taken_time IS NULL"]
|
UPDATE sync_event
|
||||||
|
SET taken_time = :taken_time
|
||||||
|
WHERE id = :sync_event_id
|
||||||
|
AND taken_time IS NULL
|
||||||
|
"""
|
||||||
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
|
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
|
||||||
if allow_taken_older_than:
|
|
||||||
taken_condition.append("taken_time < :taken_older_than")
|
|
||||||
args["taken_older_than"] = allow_taken_older_than.datetime
|
|
||||||
sql_taken_condition = "({})".format(" OR ".join(taken_condition))
|
|
||||||
sql = f"UPDATE sync_event SET taken_time = :taken_time WHERE id = :sync_event_id AND {sql_taken_condition}"
|
|
||||||
res = Session.execute(sql, args)
|
res = Session.execute(sql, args)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
except ObjectDeletedError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return res.rowcount > 0
|
return res.rowcount > 0
|
||||||
|
|
||||||
@ -3936,39 +3784,3 @@ class SyncEvent(Base, ModelMixin):
|
|||||||
.limit(100)
|
.limit(100)
|
||||||
.all()
|
.all()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class AliasAuditLog(Base, ModelMixin):
|
|
||||||
"""This model holds an audit log for all the actions performed to an alias"""
|
|
||||||
|
|
||||||
__tablename__ = "alias_audit_log"
|
|
||||||
|
|
||||||
user_id = sa.Column(sa.Integer, nullable=False)
|
|
||||||
alias_id = sa.Column(sa.Integer, nullable=False)
|
|
||||||
alias_email = sa.Column(sa.String(255), nullable=False)
|
|
||||||
action = sa.Column(sa.String(255), nullable=False)
|
|
||||||
message = sa.Column(sa.Text, default=None, nullable=True)
|
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
sa.Index("ix_alias_audit_log_user_id", "user_id"),
|
|
||||||
sa.Index("ix_alias_audit_log_alias_id", "alias_id"),
|
|
||||||
sa.Index("ix_alias_audit_log_alias_email", "alias_email"),
|
|
||||||
sa.Index("ix_alias_audit_log_created_at", "created_at"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UserAuditLog(Base, ModelMixin):
|
|
||||||
"""This model holds an audit log for all the actions performed by a user"""
|
|
||||||
|
|
||||||
__tablename__ = "user_audit_log"
|
|
||||||
|
|
||||||
user_id = sa.Column(sa.Integer, nullable=False)
|
|
||||||
user_email = sa.Column(sa.String(255), nullable=False)
|
|
||||||
action = sa.Column(sa.String(255), nullable=False)
|
|
||||||
message = sa.Column(sa.Text, default=None, nullable=True)
|
|
||||||
|
|
||||||
__table_args__ = (
|
|
||||||
sa.Index("ix_user_audit_log_user_id", "user_id"),
|
|
||||||
sa.Index("ix_user_audit_log_user_email", "user_email"),
|
|
||||||
sa.Index("ix_user_audit_log_created_at", "created_at"),
|
|
||||||
)
|
|
||||||
|
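The main-branch SyncEvent.mark_as_taken above claims an event with a conditional UPDATE and then checks the affected row count, so two dispatchers can never take the same event. A minimal standalone sketch of that claim pattern, assuming a plain SQLAlchemy engine and a sync_event table with the columns named in the SQL shown in the diff (the engine URL and function wrapper are illustrative, not SimpleLogin code):

import arrow
from sqlalchemy import create_engine, text

# Illustrative engine; the real code goes through app.db.Session.
engine = create_engine("postgresql:///simplelogin")

def mark_as_taken(sync_event_id: int, allow_taken_older_than=None) -> bool:
    # Claim the row only if it is free, or if its previous claim is stale.
    conditions = ["taken_time IS NULL"]
    params = {"taken_time": arrow.now().datetime, "sync_event_id": sync_event_id}
    if allow_taken_older_than is not None:
        conditions.append("taken_time < :taken_older_than")
        params["taken_older_than"] = allow_taken_older_than.datetime
    sql = (
        "UPDATE sync_event SET taken_time = :taken_time "
        f"WHERE id = :sync_event_id AND ({' OR '.join(conditions)})"
    )
    with engine.begin() as conn:
        res = conn.execute(text(sql), params)
    # rowcount == 0 means another worker claimed the event first.
    return res.rowcount > 0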
@@ -1,4 +1,4 @@
-from app.build_info import SHA1, VERSION
+from app.build_info import SHA1
 from app.monitor.base import monitor_bp
 
 
@@ -7,11 +7,6 @@ def git_sha1():
 return SHA1
 
 
-@monitor_bp.route("/version")
-def version():
-return VERSION
-
-
 @monitor_bp.route("/live")
 def live():
 return "live"
@@ -1,8 +0,0 @@
-from app.build_info import VERSION
-import newrelic.agent
-
-
-def send_version_event(service: str):
-newrelic.agent.record_custom_event(
-"ServiceVersion", {"service": service, "version": VERSION}
-)
@@ -1,55 +0,0 @@
-from typing import Optional
-
-import arrow
-from arrow import Arrow
-
-from app.constants import JobType
-from app.models import PartnerUser, PartnerSubscription, User, Job
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-
-
-def create_partner_user(
-user: User, partner_id: int, partner_email: str, external_user_id: str
-) -> PartnerUser:
-instance = PartnerUser.create(
-user_id=user.id,
-partner_id=partner_id,
-partner_email=partner_email,
-external_user_id=external_user_id,
-)
-Job.create(
-name=JobType.SEND_ALIAS_CREATION_EVENTS.value,
-payload={"user_id": user.id},
-run_at=arrow.now(),
-)
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.LinkAccount,
-message=f"Linked account to partner_id={partner_id} | partner_email={partner_email} | external_user_id={external_user_id}",
-)
-
-return instance
-
-
-def create_partner_subscription(
-partner_user: PartnerUser,
-expiration: Optional[Arrow] = None,
-lifetime: bool = False,
-msg: Optional[str] = None,
-) -> PartnerSubscription:
-instance = PartnerSubscription.create(
-partner_user_id=partner_user.id,
-end_at=expiration,
-lifetime=lifetime,
-)
-
-message = "User upgraded through partner subscription"
-if msg:
-message += f" | {msg}"
-emit_user_audit_log(
-user=partner_user.user,
-action=UserAuditLogAction.Upgrade,
-message=message,
-)
-
-return instance
@@ -1,121 +0,0 @@
-from typing import Optional
-
-import arrow
-
-from coinbase_commerce.error import WebhookInvalidPayload, SignatureVerificationError
-from coinbase_commerce.webhook import Webhook
-from flask import Flask, request
-
-from app.config import COINBASE_WEBHOOK_SECRET
-from app.db import Session
-from app.email_utils import send_email, render
-from app.log import LOG
-from app.models import CoinbaseSubscription, User
-from app.subscription_webhook import execute_subscription_webhook
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-
-
-def setup_coinbase_commerce(app: Flask):
-@app.route("/coinbase", methods=["POST"])
-def coinbase_webhook():
-# event payload
-request_data = request.data.decode("utf-8")
-# webhook signature
-request_sig = request.headers.get("X-CC-Webhook-Signature", None)
-
-try:
-# signature verification and event object construction
-event = Webhook.construct_event(
-request_data, request_sig, COINBASE_WEBHOOK_SECRET
-)
-except (WebhookInvalidPayload, SignatureVerificationError) as e:
-LOG.e("Invalid Coinbase webhook")
-return str(e), 400
-
-LOG.d("Coinbase event %s", event)
-
-if event["type"] == "charge:confirmed":
-if handle_coinbase_event(event):
-return "success", 200
-else:
-return "error", 400
-
-return "success", 200
-
-
-def handle_coinbase_event(event) -> bool:
-server_user_id = event["data"]["metadata"]["user_id"]
-try:
-user_id = int(server_user_id)
-except ValueError:
-user_id = int(float(server_user_id))
-
-code = event["data"]["code"]
-user: Optional[User] = User.get(user_id)
-if not user:
-LOG.e("User not found %s", user_id)
-return False
-
-coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
-user_id=user_id
-)
-
-if not coinbase_subscription:
-LOG.d("Create a coinbase subscription for %s", user)
-coinbase_subscription = CoinbaseSubscription.create(
-user_id=user_id, end_at=arrow.now().shift(years=1), code=code, commit=True
-)
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.Upgrade,
-message="Upgraded though Coinbase",
-commit=True,
-)
-send_email(
-user.email,
-"Your SimpleLogin account has been upgraded",
-render(
-"transactional/coinbase/new-subscription.txt",
-user=user,
-coinbase_subscription=coinbase_subscription,
-),
-render(
-"transactional/coinbase/new-subscription.html",
-user=user,
-coinbase_subscription=coinbase_subscription,
-),
-)
-else:
-if coinbase_subscription.code != code:
-LOG.d("Update code from %s to %s", coinbase_subscription.code, code)
-coinbase_subscription.code = code
-
-if coinbase_subscription.is_active():
-coinbase_subscription.end_at = coinbase_subscription.end_at.shift(years=1)
-else: # already expired subscription
-coinbase_subscription.end_at = arrow.now().shift(years=1)
-
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.SubscriptionExtended,
-message="Extended coinbase subscription",
-)
-Session.commit()
-
-send_email(
-user.email,
-"Your SimpleLogin account has been extended",
-render(
-"transactional/coinbase/extend-subscription.txt",
-user=user,
-coinbase_subscription=coinbase_subscription,
-),
-render(
-"transactional/coinbase/extend-subscription.html",
-user=user,
-coinbase_subscription=coinbase_subscription,
-),
-)
-execute_subscription_webhook(user)
-
-return True
@@ -1,286 +0,0 @@
-import arrow
-import json
-from dateutil.relativedelta import relativedelta
-
-
-from flask import Flask, request
-
-from app import paddle_utils, paddle_callback
-from app.config import (
-PADDLE_MONTHLY_PRODUCT_ID,
-PADDLE_MONTHLY_PRODUCT_IDS,
-PADDLE_YEARLY_PRODUCT_IDS,
-PADDLE_COUPON_ID,
-)
-from app.db import Session
-from app.email_utils import send_email, render
-from app.log import LOG
-from app.models import Subscription, PlanEnum, User, Coupon
-from app.subscription_webhook import execute_subscription_webhook
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-from app.utils import random_string
-
-
-def setup_paddle_callback(app: Flask):
-@app.route("/paddle", methods=["GET", "POST"])
-def paddle():
-LOG.d(f"paddle callback {request.form.get('alert_name')} {request.form}")
-
-# make sure the request comes from Paddle
-if not paddle_utils.verify_incoming_request(dict(request.form)):
-LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
-return "KO", 400
-
-if (
-request.form.get("alert_name") == "subscription_created"
-):  # new user subscribes
-# the passthrough is json encoded, e.g.
-# request.form.get("passthrough") = '{"user_id": 88 }'
-passthrough = json.loads(request.form.get("passthrough"))
-user_id = passthrough.get("user_id")
-user = User.get(user_id)
-
-subscription_plan_id = int(request.form.get("subscription_plan_id"))
-
-if subscription_plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
-plan = PlanEnum.monthly
-elif subscription_plan_id in PADDLE_YEARLY_PRODUCT_IDS:
-plan = PlanEnum.yearly
-else:
-LOG.e(
-"Unknown subscription_plan_id %s %s",
-subscription_plan_id,
-request.form,
-)
-return "No such subscription", 400
-
-sub = Subscription.get_by(user_id=user.id)
-
-if not sub:
-LOG.d(f"create a new Subscription for user {user}")
-Subscription.create(
-user_id=user.id,
-cancel_url=request.form.get("cancel_url"),
-update_url=request.form.get("update_url"),
-subscription_id=request.form.get("subscription_id"),
-event_time=arrow.now(),
-next_bill_date=arrow.get(
-request.form.get("next_bill_date"), "YYYY-MM-DD"
-).date(),
-plan=plan,
-)
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.Upgrade,
-message="Upgraded through Paddle",
-)
-else:
-LOG.d(f"Update an existing Subscription for user {user}")
-sub.cancel_url = request.form.get("cancel_url")
-sub.update_url = request.form.get("update_url")
-sub.subscription_id = request.form.get("subscription_id")
-sub.event_time = arrow.now()
-sub.next_bill_date = arrow.get(
-request.form.get("next_bill_date"), "YYYY-MM-DD"
-).date()
-sub.plan = plan
-
-# make sure to set the new plan as not-cancelled
-# in case user cancels a plan and subscribes a new plan
-sub.cancelled = False
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.SubscriptionExtended,
-message="Extended Paddle subscription",
-)
-
-execute_subscription_webhook(user)
-LOG.d("User %s upgrades!", user)
-
-Session.commit()
-
-elif request.form.get("alert_name") == "subscription_payment_succeeded":
-subscription_id = request.form.get("subscription_id")
-LOG.d("Update subscription %s", subscription_id)
-
-sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
-# when user subscribes, the "subscription_payment_succeeded" can arrive BEFORE "subscription_created"
-# at that time, subscription object does not exist yet
-if sub:
-sub.event_time = arrow.now()
-sub.next_bill_date = arrow.get(
-request.form.get("next_bill_date"), "YYYY-MM-DD"
-).date()
-
-Session.commit()
-execute_subscription_webhook(sub.user)
-
-elif request.form.get("alert_name") == "subscription_cancelled":
-subscription_id = request.form.get("subscription_id")
-
-sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
-if sub:
-# cancellation_effective_date should be the same as next_bill_date
-LOG.w(
-"Cancel subscription %s %s on %s, next bill date %s",
-subscription_id,
-sub.user,
-request.form.get("cancellation_effective_date"),
-sub.next_bill_date,
-)
-sub.event_time = arrow.now()
-
-sub.cancelled = True
-emit_user_audit_log(
-user=sub.user,
-action=UserAuditLogAction.SubscriptionCancelled,
-message="Cancelled Paddle subscription",
-)
-Session.commit()
-
-user = sub.user
-
-send_email(
-user.email,
-"SimpleLogin - your subscription is canceled",
-render(
-"transactional/subscription-cancel.txt",
-user=user,
-end_date=request.form.get("cancellation_effective_date"),
-),
-)
-execute_subscription_webhook(sub.user)
-
-else:
-# user might have deleted their account
-LOG.i(f"Cancel non-exist subscription {subscription_id}")
-return "OK"
-elif request.form.get("alert_name") == "subscription_updated":
-subscription_id = request.form.get("subscription_id")
-
-sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
-if sub:
-next_bill_date = request.form.get("next_bill_date")
-if not next_bill_date:
-paddle_callback.failed_payment(sub, subscription_id)
-return "OK"
-
-LOG.d(
-"Update subscription %s %s on %s, next bill date %s",
-subscription_id,
-sub.user,
-request.form.get("cancellation_effective_date"),
-sub.next_bill_date,
-)
-if (
-int(request.form.get("subscription_plan_id"))
-== PADDLE_MONTHLY_PRODUCT_ID
-):
-plan = PlanEnum.monthly
-else:
-plan = PlanEnum.yearly
-
-sub.cancel_url = request.form.get("cancel_url")
-sub.update_url = request.form.get("update_url")
-sub.event_time = arrow.now()
-sub.next_bill_date = arrow.get(
-request.form.get("next_bill_date"), "YYYY-MM-DD"
-).date()
-sub.plan = plan
-
-# make sure to set the new plan as not-cancelled
-sub.cancelled = False
-emit_user_audit_log(
-user=sub.user,
-action=UserAuditLogAction.SubscriptionExtended,
-message="Extended Paddle subscription",
-)
-
-Session.commit()
-execute_subscription_webhook(sub.user)
-else:
-LOG.w(
-f"update non-exist subscription {subscription_id}. {request.form}"
-)
-return "No such subscription", 400
-elif request.form.get("alert_name") == "payment_refunded":
-subscription_id = request.form.get("subscription_id")
-LOG.d("Refund request for subscription %s", subscription_id)
-
-sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
-
-if sub:
-user = sub.user
-Subscription.delete(sub.id)
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.SubscriptionCancelled,
-message="Paddle subscription cancelled as user requested a refund",
-)
-Session.commit()
-LOG.e("%s requests a refund", user)
-execute_subscription_webhook(sub.user)
-
-elif request.form.get("alert_name") == "subscription_payment_refunded":
-subscription_id = request.form.get("subscription_id")
-sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
-LOG.d(
-"Handle subscription_payment_refunded for subscription %s",
-subscription_id,
-)
-
-if not sub:
-LOG.w(
-"No such subscription for %s, payload %s",
-subscription_id,
-request.form,
-)
-return "No such subscription"
-
-plan_id = int(request.form["subscription_plan_id"])
-if request.form["refund_type"] == "full":
-if plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
-LOG.d("subtract 1 month from next_bill_date %s", sub.next_bill_date)
-sub.next_bill_date = sub.next_bill_date - relativedelta(months=1)
-LOG.d("next_bill_date is %s", sub.next_bill_date)
-Session.commit()
-elif plan_id in PADDLE_YEARLY_PRODUCT_IDS:
-LOG.d("subtract 1 year from next_bill_date %s", sub.next_bill_date)
-sub.next_bill_date = sub.next_bill_date - relativedelta(years=1)
-LOG.d("next_bill_date is %s", sub.next_bill_date)
-Session.commit()
-else:
-LOG.e("Unknown plan_id %s", plan_id)
-else:
-LOG.w("partial subscription_payment_refunded, not handled")
-execute_subscription_webhook(sub.user)
-
-return "OK"
-
-@app.route("/paddle_coupon", methods=["GET", "POST"])
-def paddle_coupon():
-LOG.d("paddle coupon callback %s", request.form)
-
-if not paddle_utils.verify_incoming_request(dict(request.form)):
-LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
-return "KO", 400
-
-product_id = request.form.get("p_product_id")
-if product_id != PADDLE_COUPON_ID:
-LOG.e("product_id %s not match with %s", product_id, PADDLE_COUPON_ID)
-return "KO", 400
-
-email = request.form.get("email")
-LOG.d("Paddle coupon request for %s", email)
-
-coupon = Coupon.create(
-code=random_string(30),
-comment="For 1-year coupon",
-expires_date=arrow.now().shift(years=1, days=-1),
-commit=True,
-)
-
-return (
-f"Your 1-year coupon is <b>{coupon.code}</b> <br> "
-f"It's valid until <b>{coupon.expires_date.date().isoformat()}</b>"
-)
@@ -16,7 +16,6 @@ PROTON_ERROR_CODE_HV_NEEDED = 9001
 
 PLAN_FREE = 1
 PLAN_PREMIUM = 2
-PLAN_PREMIUM_LIFETIME = 3
 
 
 @dataclass
@@ -113,13 +112,10 @@ class HttpProtonClient(ProtonClient):
 if plan_value == PLAN_FREE:
 plan = SLPlan(type=SLPlanType.Free, expiration=None)
 elif plan_value == PLAN_PREMIUM:
-expiration = info.get("PlanExpiration", "1")
 plan = SLPlan(
 type=SLPlanType.Premium,
-expiration=Arrow.fromtimestamp(expiration, tzinfo="utc"),
+expiration=Arrow.fromtimestamp(info["PlanExpiration"], tzinfo="utc"),
 )
-elif plan_value == PLAN_PREMIUM_LIFETIME:
-plan = SLPlan(SLPlanType.PremiumLifetime, expiration=None)
 else:
 raise Exception(f"Invalid value for plan: {plan_value}")
 
@@ -1,39 +0,0 @@
-from newrelic import agent
-
-from app.db import Session
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import EventContent, UserUnlinked
-from app.log import LOG
-from app.models import User, PartnerUser
-from app.proton.proton_partner import get_proton_partner
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-
-
-def can_unlink_proton_account(user: User) -> bool:
-return (user.flags & User.FLAG_CREATED_FROM_PARTNER) == 0
-
-
-def perform_proton_account_unlink(
-current_user: User, skip_check: bool = False
-) -> None | str:
-if not skip_check and not can_unlink_proton_account(current_user):
-return None
-proton_partner = get_proton_partner()
-partner_user = PartnerUser.get_by(
-user_id=current_user.id, partner_id=proton_partner.id
-)
-if partner_user is not None:
-LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
-emit_user_audit_log(
-user=current_user,
-action=UserAuditLogAction.UnlinkAccount,
-message=f"User has unlinked the account (email={partner_user.partner_email} | external_user_id={partner_user.external_user_id})",
-)
-EventDispatcher.send_event(
-partner_user.user, EventContent(user_unlinked=UserUnlinked())
-)
-PartnerUser.delete(partner_user.id)
-external_user_id = partner_user.external_user_id
-Session.commit()
-agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
-return external_user_id
@@ -1,8 +1,10 @@
+from newrelic import agent
 from typing import Optional
 
 from app.db import Session
+from app.log import LOG
 from app.errors import ProtonPartnerNotSetUp
-from app.models import Partner
+from app.models import Partner, PartnerUser, User
 
 PROTON_PARTNER_NAME = "Proton"
 _PROTON_PARTNER: Optional[Partner] = None
@@ -21,3 +23,15 @@ def get_proton_partner() -> Partner:
 
 def is_proton_partner(partner: Partner) -> bool:
 return partner.name == PROTON_PARTNER_NAME
+
+
+def perform_proton_account_unlink(current_user: User):
+proton_partner = get_proton_partner()
+partner_user = PartnerUser.get_by(
+user_id=current_user.id, partner_id=proton_partner.id
+)
+if partner_user is not None:
+LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
+PartnerUser.delete(partner_user.id)
+Session.commit()
+agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
@@ -1,6 +0,0 @@
-from random import randbytes
-from base64 import b64encode
-
-
-def generate_request_id() -> str:
-return b64encode(randbytes(6)).decode()
@@ -1,21 +0,0 @@
-from typing import Optional
-
-from sentry_sdk.types import Event, Hint
-
-_HTTP_CODES_TO_IGNORE = [416]
-
-
-def _should_send(_event: Event, hint: Hint) -> bool:
-# Check if this is an HTTP Exception event
-if "exc_info" in hint:
-exc_type, exc_value, exc_traceback = hint["exc_info"]
-# Check if it's a Werkzeug HTTPException (raised for HTTP status codes)
-if hasattr(exc_value, "code") and exc_value.code in _HTTP_CODES_TO_IGNORE:
-return False
-return True
-
-
-def sentry_before_send(event: Event, hint: Hint) -> Optional[Event]:
-if _should_send(event, hint):
-return event
-return None
@@ -1,7 +1,6 @@
 """Inspired from
 https://github.com/petermat/spamassassin_client
 """
-
 import logging
 import socket
 from io import BytesIO
@@ -1,16 +1,40 @@
+import requests
+from requests import RequestException
+
+from app import config
 from app.db import Session
 from app.events.event_dispatcher import EventDispatcher
 from app.events.generated.event_pb2 import EventContent, UserPlanChanged
+from app.log import LOG
 from app.models import User
 
 
 def execute_subscription_webhook(user: User):
+webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
+if webhook_url is None:
+return
 subscription_end = user.get_active_subscription_end(
 include_partner_subscription=False
 )
 sl_subscription_end = None
 if subscription_end:
 sl_subscription_end = subscription_end.timestamp
+payload = {
+"user_id": user.id,
+"is_premium": user.is_premium(),
+"active_subscription_end": sl_subscription_end,
+}
+try:
+response = requests.post(webhook_url, json=payload, timeout=2)
+if response.status_code == 200:
+LOG.i("Sent request to subscription update webhook successfully")
+else:
+LOG.i(
+f"Request to webhook failed with status {response.status_code}: {response.text}"
+)
+except RequestException as e:
+LOG.error(f"Subscription request exception: {e}")
+
 event = UserPlanChanged(plan_end_time=sl_subscription_end)
 EventDispatcher.send_event(user, EventContent(user_plan_change=event))
 Session.commit()
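The 4.49.8 side of execute_subscription_webhook above POSTs a small JSON payload (user_id, is_premium, active_subscription_end) to the URL configured in SUBSCRIPTION_CHANGE_WEBHOOK. A minimal sketch of a receiver for that payload, assuming Flask on the receiving end; the route name and the logging side effect are illustrative, not part of SimpleLogin:

from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route("/sl-subscription-changed", methods=["POST"])
def sl_subscription_changed():
    data = request.get_json(force=True) or {}
    user_id = data.get("user_id")
    is_premium = bool(data.get("is_premium"))
    # Unix timestamp of the active subscription end, or None for free users
    subscription_end = data.get("active_subscription_end")
    # Replace with whatever side effect the integration needs
    app.logger.info("user=%s premium=%s ends_at=%s", user_id, is_premium, subscription_end)
    return jsonify({"status": "ok"}), 200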
@@ -1,44 +0,0 @@
-from enum import Enum
-
-from app.models import User, UserAuditLog
-
-
-class UserAuditLogAction(Enum):
-CreateUser = "create_user"
-ActivateUser = "activate_user"
-ResetPassword = "reset_password"
-
-Upgrade = "upgrade"
-SubscriptionExtended = "subscription_extended"
-SubscriptionCancelled = "subscription_cancelled"
-LinkAccount = "link_account"
-UnlinkAccount = "unlink_account"
-
-CreateMailbox = "create_mailbox"
-VerifyMailbox = "verify_mailbox"
-UpdateMailbox = "update_mailbox"
-DeleteMailbox = "delete_mailbox"
-
-CreateCustomDomain = "create_custom_domain"
-VerifyCustomDomain = "verify_custom_domain"
-UpdateCustomDomain = "update_custom_domain"
-DeleteCustomDomain = "delete_custom_domain"
-
-CreateDirectory = "create_directory"
-UpdateDirectory = "update_directory"
-DeleteDirectory = "delete_directory"
-
-UserMarkedForDeletion = "user_marked_for_deletion"
-DeleteUser = "delete_user"
-
-
-def emit_user_audit_log(
-user: User, action: UserAuditLogAction, message: str, commit: bool = False
-):
-UserAuditLog.create(
-user_id=user.id,
-user_email=user.email,
-action=action.value,
-message=message,
-commit=commit,
-)
@@ -3,7 +3,6 @@ from typing import Optional
 from app.db import Session
 from app.log import LOG
 from app.models import User, SLDomain, CustomDomain, Mailbox
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 
 
 class CannotSetAlias(Exception):
@@ -55,7 +54,7 @@ def set_default_alias_domain(user: User, domain_name: Optional[str]):
 
 
 def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
-mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
+mailbox = Mailbox.get(mailbox_id)
 
 if not mailbox or mailbox.user_id != user.id:
 raise CannotSetMailbox("Invalid mailbox")
@@ -68,11 +67,5 @@ def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
 LOG.i(f"User {user} has set mailbox {mailbox} as his default one")
 
 user.default_mailbox_id = mailbox.id
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.UpdateMailbox,
-message=f"Set mailbox {mailbox.id} ({mailbox.email}) as default",
-)
-
 Session.commit()
 return mailbox
@@ -1,3 +1,4 @@
+import random
 import re
 import secrets
 import string
@@ -31,9 +32,8 @@ def random_words(words: int = 2, numbers: int = 0):
 fields = [secrets.choice(_words) for i in range(words)]
 
 if numbers > 0:
-digits = [n for n in range(10)]
-suffix = "".join([str(secrets.choice(digits)) for i in range(numbers)])
-return "_".join(fields) + suffix
+digits = "".join([str(random.randint(0, 9)) for i in range(numbers)])
+return "_".join(fields) + digits
 else:
 return "_".join(fields)
 
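The main branch draws the numeric suffix in random_words with secrets.choice, while 4.49.8 falls back to random.randint; for anything that ends up in an alias address, the CSPRNG variant is the safer default. A small self-contained sketch of the main-branch approach, with a shortened placeholder word list since the real one is loaded elsewhere:

import secrets

_words = ["alpha", "bravo", "charlie", "delta"]  # placeholder word list

def random_words(words: int = 2, numbers: int = 0) -> str:
    fields = [secrets.choice(_words) for _ in range(words)]
    if numbers > 0:
        digits = [n for n in range(10)]
        # secrets.choice keeps the suffix unpredictable, unlike random.randint
        suffix = "".join(str(secrets.choice(digits)) for _ in range(numbers))
        return "_".join(fields) + suffix
    return "_".join(fields)

print(random_words(2, 3))  # e.g. "bravo_delta_417"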
94
app/cron.py
94
app/cron.py
@ -14,9 +14,8 @@ from sqlalchemy.sql import Insert, text
|
|||||||
from app import s3, config
|
from app import s3, config
|
||||||
from app.alias_utils import nb_email_log_for_mailbox
|
from app.alias_utils import nb_email_log_for_mailbox
|
||||||
from app.api.views.apple import verify_receipt
|
from app.api.views.apple import verify_receipt
|
||||||
from app.custom_domain_validation import CustomDomainValidation, is_mx_equivalent
|
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.dns_utils import get_mx_domains
|
from app.dns_utils import get_mx_domains, is_mx_equivalent
|
||||||
from app.email_utils import (
|
from app.email_utils import (
|
||||||
send_email,
|
send_email,
|
||||||
send_trial_end_soon_email,
|
send_trial_end_soon_email,
|
||||||
@ -59,12 +58,9 @@ from app.models import (
|
|||||||
ApiToCookieToken,
|
ApiToCookieToken,
|
||||||
)
|
)
|
||||||
from app.pgp_utils import load_public_key_and_check, PGPException
|
from app.pgp_utils import load_public_key_and_check, PGPException
|
||||||
from app.proton.proton_partner import get_proton_partner
|
from app.proton.utils import get_proton_partner
|
||||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
|
||||||
from app.utils import sanitize_email
|
from app.utils import sanitize_email
|
||||||
from server import create_light_app
|
from server import create_light_app
|
||||||
from tasks.clean_alias_audit_log import cleanup_alias_audit_log
|
|
||||||
from tasks.clean_user_audit_log import cleanup_user_audit_log
|
|
||||||
from tasks.cleanup_old_imports import cleanup_old_imports
|
from tasks.cleanup_old_imports import cleanup_old_imports
|
||||||
from tasks.cleanup_old_jobs import cleanup_old_jobs
|
from tasks.cleanup_old_jobs import cleanup_old_jobs
|
||||||
from tasks.cleanup_old_notifications import cleanup_old_notifications
|
from tasks.cleanup_old_notifications import cleanup_old_notifications
|
||||||
@ -286,16 +282,8 @@ def notify_manual_sub_end():
|
|||||||
|
|
||||||
def poll_apple_subscription():
|
def poll_apple_subscription():
|
||||||
"""Poll Apple API to update AppleSubscription"""
|
"""Poll Apple API to update AppleSubscription"""
|
||||||
for apple_sub in (
|
# todo: only near the end of the subscription
|
||||||
AppleSubscription.filter(
|
for apple_sub in AppleSubscription.all():
|
||||||
AppleSubscription.expires_date < arrow.now().shift(days=15)
|
|
||||||
)
|
|
||||||
.enable_eagerloads(False)
|
|
||||||
.yield_per(100)
|
|
||||||
):
|
|
||||||
if not apple_sub.is_valid():
|
|
||||||
# Subscription is not valid anymore and hasn't been renewed
|
|
||||||
continue
|
|
||||||
if not apple_sub.product_id:
|
if not apple_sub.product_id:
|
||||||
LOG.d("Ignore %s", apple_sub)
|
LOG.d("Ignore %s", apple_sub)
|
||||||
continue
|
continue
|
||||||
@ -908,24 +896,6 @@ def check_mailbox_valid_pgp_keys():
|
|||||||
|
|
||||||
|
|
||||||
def check_custom_domain():
|
def check_custom_domain():
|
||||||
# Delete custom domains that haven't been verified in a month
|
|
||||||
for custom_domain in (
|
|
||||||
CustomDomain.filter(
|
|
||||||
CustomDomain.verified == False, # noqa: E712
|
|
||||||
CustomDomain.created_at < arrow.now().shift(months=-1),
|
|
||||||
)
|
|
||||||
.enable_eagerloads(False)
|
|
||||||
.yield_per(100)
|
|
||||||
):
|
|
||||||
alias_count = Alias.filter(Alias.custom_domain_id == custom_domain.id).count()
|
|
||||||
if alias_count > 0:
|
|
||||||
LOG.warn(
|
|
||||||
f"Custom Domain {custom_domain} has {alias_count} aliases. Won't delete"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
LOG.i(f"Deleting unverified old custom domain {custom_domain}")
|
|
||||||
CustomDomain.delete(custom_domain.id)
|
|
||||||
|
|
||||||
LOG.d("Check verified domain for DNS issues")
|
LOG.d("Check verified domain for DNS issues")
|
||||||
|
|
||||||
for custom_domain in CustomDomain.filter_by(verified=True): # type: CustomDomain
|
for custom_domain in CustomDomain.filter_by(verified=True): # type: CustomDomain
|
||||||
@ -935,11 +905,9 @@ def check_custom_domain():
|
|||||||
LOG.i("custom domain has been deleted")
|
LOG.i("custom domain has been deleted")
|
||||||
|
|
||||||
|
|
||||||
def check_single_custom_domain(custom_domain: CustomDomain):
|
def check_single_custom_domain(custom_domain):
|
||||||
mx_domains = get_mx_domains(custom_domain.domain)
|
mx_domains = get_mx_domains(custom_domain.domain)
|
||||||
validator = CustomDomainValidation(dkim_domain=config.EMAIL_DOMAIN)
|
if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
|
||||||
expected_custom_domains = validator.get_expected_mx_records(custom_domain)
|
|
||||||
if not is_mx_equivalent(mx_domains, expected_custom_domains):
|
|
||||||
user = custom_domain.user
|
user = custom_domain.user
|
||||||
LOG.w(
|
LOG.w(
|
||||||
"The MX record is not correctly set for %s %s %s",
|
"The MX record is not correctly set for %s %s %s",
|
||||||
@ -997,7 +965,7 @@ def delete_expired_tokens():
|
|||||||
LOG.d("Delete api to cookie tokens older than %s, nb row %s", max_time, nb_row)
|
LOG.d("Delete api to cookie tokens older than %s, nb row %s", max_time, nb_row)
|
||||||
|
|
||||||
|
|
||||||
async def _hibp_check(api_key: str, queue: asyncio.Queue):
|
async def _hibp_check(api_key, queue):
|
||||||
"""
|
"""
|
||||||
 Uses a single API key to check the queue as fast as possible.

@@ -1016,16 +984,11 @@ async def _hibp_check(api_key: str, queue: asyncio.Queue):
 if not alias:
 continue
 user = alias.user
-if user.disabled or not user.is_premium():
+if user.disabled or not user.is_paid():
 # Mark it as hibp done to skip it as if it had been checked
 alias.hibp_last_check = arrow.utcnow()
 Session.commit()
 continue
-if alias.flags & Alias.FLAG_PARTNER_CREATED > 0:
-# Mark as hibp done
-alias.hibp_last_check = arrow.utcnow()
-Session.commit()
-continue

 LOG.d("Checking HIBP for %s", alias)

@@ -1252,7 +1215,7 @@ def notify_hibp():


 def clear_users_scheduled_to_be_deleted(dry_run=False):
-users: List[User] = User.filter(
+users = User.filter(
 and_(
 User.delete_on.isnot(None),
 User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
@@ -1264,11 +1227,6 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
 )
 if dry_run:
 continue
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.DeleteUser,
-message=f"Delete user {user.id} ({user.email})",
-)
 User.delete(user.id)
 Session.commit()

@@ -1280,16 +1238,6 @@ def delete_old_data():
 cleanup_old_notifications(oldest_valid)


-def clear_alias_audit_log():
-oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
-cleanup_alias_audit_log(oldest_valid)
-
-
-def clear_user_audit_log():
-oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
-cleanup_user_audit_log(oldest_valid)
-
-
 if __name__ == "__main__":
 LOG.d("Start running cronjob")
 parser = argparse.ArgumentParser()
@@ -1298,6 +1246,22 @@ if __name__ == "__main__":
 "--job",
 help="Choose a cron job to run",
 type=str,
+choices=[
+"stats",
+"notify_trial_end",
+"notify_manual_subscription_end",
+"notify_premium_end",
+"delete_logs",
+"delete_old_data",
+"poll_apple_subscription",
+"sanity_check",
+"delete_old_monitoring",
+"check_custom_domain",
+"check_hibp",
+"notify_hibp",
+"cleanup_tokens",
+"send_undelivered_mails",
+],
 )
 args = parser.parse_args()
 # wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc
@@ -1346,10 +1310,4 @@ if __name__ == "__main__":
 load_unsent_mails_from_fs_and_resend()
 elif args.job == "delete_scheduled_users":
 LOG.d("Deleting users scheduled to be deleted")
-clear_users_scheduled_to_be_deleted()
+clear_users_scheduled_to_be_deleted(dry_run=True)
-elif args.job == "clear_alias_audit_log":
-LOG.d("Clearing alias audit log")
-clear_alias_audit_log()
-elif args.job == "clear_user_audit_log":
-LOG.d("Clearing user audit log")
-clear_user_audit_log()
@@ -14,28 +14,15 @@ jobs:
 - name: SimpleLogin Custom Domain check
 command: python /code/cron.py -j check_custom_domain
 shell: /bin/bash
-schedule: "15 */4 * * *"
+schedule: "15 2 * * *"
 captureStderr: true
-concurrencyPolicy: Forbid
-onFailure:
-retry:
-maximumRetries: 10
-initialDelay: 1
-maximumDelay: 30
-backoffMultiplier: 2

 - name: SimpleLogin HIBP check
 command: python /code/cron.py -j check_hibp
 shell: /bin/bash
-schedule: "13 */4 * * *"
+schedule: "15 3 * * *"
 captureStderr: true
 concurrencyPolicy: Forbid
-onFailure:
-retry:
-maximumRetries: 10
-initialDelay: 1
-maximumDelay: 30
-backoffMultiplier: 2

 - name: SimpleLogin Notify HIBP breaches
 command: python /code/cron.py -j notify_hibp
@@ -44,7 +31,6 @@ jobs:
 captureStderr: true
 concurrencyPolicy: Forbid
-

 - name: SimpleLogin Delete Logs
 command: python /code/cron.py -j delete_logs
 shell: /bin/bash
@@ -94,17 +80,3 @@ jobs:
 schedule: "*/5 * * * *"
 captureStderr: true
 concurrencyPolicy: Forbid
-
-- name: SimpleLogin clear alias_audit_log old entries
-command: python /code/cron.py -j clear_alias_audit_log
-shell: /bin/bash
-schedule: "0 * * * *" # Once every hour
-captureStderr: true
-concurrencyPolicy: Forbid
-
-- name: SimpleLogin clear user_audit_log old entries
-command: python /code/cron.py -j clear_user_audit_log
-shell: /bin/bash
-schedule: "0 * * * *" # Once every hour
-captureStderr: true
-concurrencyPolicy: Forbid
@@ -369,8 +369,8 @@ For ex:
 "is_premium": false
 },
 {
-"signed_suffix": ".yeah@sl.lan.X6_7OQ.i8XL4xsMsn7dxDEWU8eF-Zap0qo",
+"signed_suffix": ".yeah@sl.local.X6_7OQ.i8XL4xsMsn7dxDEWU8eF-Zap0qo",
-"suffix": ".yeah@sl.lan",
+"suffix": ".yeah@sl.local",
 "is_custom": true,
 "is_premium": false
 }
@@ -465,7 +465,7 @@ Here's an example:
 {
 "creation_date": "2020-04-06 17:57:14+00:00",
 "creation_timestamp": 1586195834,
-"email": "prefix1.cat@sl.lan",
+"email": "prefix1.cat@sl.local",
 "name": "A Name",
 "enabled": true,
 "id": 3,
@@ -518,7 +518,7 @@ Alias info, use the same format as in /api/v2/aliases. For example:
 {
 "creation_date": "2020-04-06 17:57:14+00:00",
 "creation_timestamp": 1586195834,
-"email": "prefix1.cat@sl.lan",
+"email": "prefix1.cat@sl.local",
 "name": "A Name",
 "enabled": true,
 "id": 3,
@@ -608,7 +608,7 @@ If success, 200 with the list of activities, for example:
 "activities": [
 {
 "action": "reply",
-"from": "yes_meo_chat@sl.lan",
+"from": "yes_meo_chat@sl.local",
 "timestamp": 1580903760,
 "to": "marketing@example.com",
 "reverse_alias": "\"marketing at example.com\" <reply@a.b>",
@@ -703,7 +703,7 @@ Return 200 and `existed=true` if contact is already added.
 "creation_timestamp": 1584186761,
 "last_email_sent_date": null,
 "last_email_sent_timestamp": null,
-"reverse_alias": "First Last first@example.com <ra+qytyzjhrumrreuszrbjxqjlkh@sl.lan>",
+"reverse_alias": "First Last first@example.com <ra+qytyzjhrumrreuszrbjxqjlkh@sl.local>",
 "reverse_alias_address": "reply+bzvpazcdedcgcpztehxzgjgzmxskqa@sl.co",
 "existed": false
 }
@@ -992,7 +992,7 @@ Return user setting.
 {
 "alias_generator": "word",
 "notification": true,
-"random_alias_default_domain": "sl.lan",
+"random_alias_default_domain": "sl.local",
 "sender_format": "AT",
 "random_alias_suffix": "random_string"
 }
@@ -1029,7 +1029,7 @@ Return domains that user can use to create random alias
 "is_custom": false
 },
 {
-"domain": "sl.lan",
+"domain": "sl.local",
 "is_custom": false
 },
 {
@@ -30,7 +30,6 @@ It should contain the following info:
-

 """

 import argparse
 import email
 import time
@@ -53,12 +52,8 @@ from flanker.addresslib import address
 from flanker.addresslib.address import EmailAddress
 from sqlalchemy.exc import IntegrityError

-from app import pgp_utils, s3, config, contact_utils
+from app import pgp_utils, s3, config
-from app.alias_utils import (
+from app.alias_utils import try_auto_create, change_alias_status
-try_auto_create,
-change_alias_status,
-get_alias_recipient_name,
-)
 from app.config import (
 EMAIL_DOMAIN,
 URL,
@@ -150,7 +145,6 @@ from app.handler.unsubscribe_generator import UnsubscribeGenerator
 from app.handler.unsubscribe_handler import UnsubscribeHandler
 from app.log import LOG, set_message_id
 from app.mail_sender import sl_sendmail
-from app.mailbox_utils import get_mailbox_for_reply_phase
 from app.message_utils import message_to_bytes
 from app.models import (
 Alias,
@@ -168,21 +162,18 @@ from app.models import (
 VerpType,
 SLDomain,
 )
-from app.monitor_utils import send_version_event
 from app.pgp_utils import (
 PGPException,
 sign_data_with_pgpy,
 sign_data,
 load_public_key_and_check,
 )
-from app.utils import sanitize_email
+from app.utils import sanitize_email, canonicalize_email
 from init_app import load_pgp_public_keys
 from server import create_light_app


-def get_or_create_contact(
+def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Contact:
-from_header: str, mail_from: str, alias: Alias
-) -> Optional[Contact]:
 """
 contact_from_header is the RFC 2047 format FROM header
 """
@@ -204,18 +195,81 @@ def get_or_create_contact(
 mail_from,
 )
 contact_email = mail_from
-contact_result = contact_utils.create_contact(
-email=contact_email,
+if not is_valid_email(contact_email):
-alias=alias,
+LOG.w(
+"invalid contact email %s. Parse from %s %s",
+contact_email,
+from_header,
+mail_from,
+)
+# either reuse a contact with empty email or create a new contact with empty email
+contact_email = ""
+
+contact_email = sanitize_email(contact_email, not_lower=True)
+
+if contact_name and "\x00" in contact_name:
+LOG.w("issue with contact name %s", contact_name)
+contact_name = ""
+
+contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
+if contact:
+if contact.name != contact_name:
+LOG.d(
+"Update contact %s name %s to %s",
+contact,
+contact.name,
+contact_name,
+)
+contact.name = contact_name
+Session.commit()
+
+# contact created in the past does not have mail_from and from_header field
+if not contact.mail_from and mail_from:
+LOG.d(
+"Set contact mail_from %s: %s to %s",
+contact,
+contact.mail_from,
+mail_from,
+)
+contact.mail_from = mail_from
+Session.commit()
+else:
+alias_id = alias.id
+try:
+contact_email_for_reply = (
+contact_email if is_valid_email(contact_email) else ""
+)
+contact = Contact.create(
+user_id=alias.user_id,
+alias_id=alias_id,
+website_email=contact_email,
 name=contact_name,
 mail_from=mail_from,
-allow_empty_email=True,
+reply_email=generate_reply_email(contact_email_for_reply, alias),
 automatic_created=True,
-from_partner=False,
 )
-if contact_result.error:
+if not contact_email:
-LOG.w(f"Error creating contact: {contact_result.error.value}")
+LOG.d("Create a contact with invalid email for %s", alias)
-return contact_result.contact
+contact.invalid_email = True
+
+LOG.d(
+"create contact %s for %s, reverse alias:%s",
+contact_email,
+alias,
+contact.reply_email,
+)
+
+Session.commit()
+except IntegrityError:
+# If the tx has been rolled back, the connection is borked. Force close to try to get a new one and start fresh
+Session.close()
+LOG.info(
+f"Contact with email {contact_email} for alias_id {alias_id} already existed, fetching from DB"
+)
+contact = Contact.get_by(alias_id=alias_id, website_email=contact_email)
+
+return contact


 def get_or_create_reply_to_contact(
@@ -240,7 +294,33 @@ def get_or_create_reply_to_contact(
 )
 return None

-return contact_utils.create_contact(contact_address, alias, contact_name).contact
+contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
+if contact:
+return contact
+else:
+LOG.d(
+"create contact %s for alias %s via reply-to header %s",
+contact_address,
+alias,
+reply_to_header,
+)
+
+try:
+contact = Contact.create(
+user_id=alias.user_id,
+alias_id=alias.id,
+website_email=contact_address,
+name=contact_name,
+reply_email=generate_reply_email(contact_address, alias),
+automatic_created=True,
+)
+Session.commit()
+except IntegrityError:
+LOG.w("Contact %s %s already exist", alias, contact_address)
+Session.rollback()
+contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
+
+return contact


 def replace_header_when_forward(msg: Message, alias: Alias, header: str):
@@ -565,7 +645,7 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str

 if not user.is_active():
 LOG.w(f"User {user} has been soft deleted")
-return [(False, status.E502)]
+return False, status.E502

 if not user.can_send_or_receive():
 LOG.i(f"User {user} cannot receive emails")
@@ -586,48 +666,19 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
 from_header = get_header_unicode(msg[headers.FROM])
 LOG.d("Create or get contact for from_header:%s", from_header)
 contact = get_or_create_contact(from_header, envelope.mail_from, alias)
-if not contact:
-return [(False, status.E504)]
 alias = (
 contact.alias
 ) # In case the Session was closed in the get_or_create we re-fetch the alias

-reply_to_contact = []
+reply_to_contact = None
 if msg[headers.REPLY_TO]:
-reply_to_header_contents = get_header_unicode(msg[headers.REPLY_TO])
+reply_to = get_header_unicode(msg[headers.REPLY_TO])
-if reply_to_header_contents:
+LOG.d("Create or get contact for reply_to_header:%s", reply_to)
-LOG.d(
+# ignore when reply-to = alias
-"Create or get contact for reply_to_header:%s", reply_to_header_contents
+if reply_to == alias.email:
-)
-for reply_to in [
-reply_to.strip()
-for reply_to in reply_to_header_contents.split(",")
-if reply_to.strip()
-]:
-try:
-reply_to_name, reply_to_email = parse_full_address(reply_to)
-except ValueError:
-LOG.d(f"Could not parse reply-to address {reply_to}")
-continue
-if reply_to_email == alias.email:
 LOG.i("Reply-to same as alias %s", alias)
 else:
-reply_contact = get_or_create_reply_to_contact(
+reply_to_contact = get_or_create_reply_to_contact(reply_to, alias, msg)
-reply_to_email, alias, msg
-)
-if reply_contact:
-reply_to_contact.append(reply_contact)

-if alias.user.delete_on is not None:
-LOG.d(f"user {user} is pending to be deleted. Do not forward")
-EmailLog.create(
-contact_id=contact.id,
-user_id=contact.user_id,
-blocked=True,
-alias_id=contact.alias_id,
-commit=True,
-)
-return [(True, status.E502)]
-
 if not alias.enabled or contact.block_forward:
 LOG.d("%s is disabled, do not forward", alias)
@@ -719,7 +770,7 @@ def forward_email_to_mailbox(
 envelope,
 mailbox,
 user,
-reply_to_contacts: list[Contact],
+reply_to_contact: Optional[Contact],
 ) -> (bool, str):
 LOG.d("Forward %s -> %s -> %s", contact, alias, mailbox)

@@ -767,7 +818,7 @@ def forward_email_to_mailbox(

 email_log = EmailLog.create(
 contact_id=contact.id,
-user_id=contact.user_id,
+user_id=user.id,
 mailbox_id=mailbox.id,
 alias_id=contact.alias_id,
 message_id=str(msg[headers.MESSAGE_ID]),
@@ -902,13 +953,11 @@ def forward_email_to_mailbox(
 add_or_replace_header(msg, "From", new_from_header)
 LOG.d("From header, new:%s, old:%s", new_from_header, old_from_header)

-if len(reply_to_contacts) > 0:
+if reply_to_contact:
-original_reply_to = get_header_unicode(msg[headers.REPLY_TO])
+reply_to_header = msg[headers.REPLY_TO]
-new_reply_to_header = ", ".join(
+new_reply_to_header = reply_to_contact.new_addr()
-[reply_to_contact.new_addr() for reply_to_contact in reply_to_contacts][:5]
-)
 add_or_replace_header(msg, "Reply-To", new_reply_to_header)
-LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, original_reply_to)
+LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, reply_to_header)

 # replace CC & To emails by reverse-alias for all emails that are not alias
 try:
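For context on the Reply-To hunks above: on the main side every parsable Reply-To address becomes its own contact, and the rewritten header is the comma-joined reverse aliases of at most five of them. A minimal sketch of that rewrite, under the assumption that new_addr() is the Contact method referenced in the diff; the helper name itself is hypothetical:

    # Sketch only: cap the rewritten Reply-To header at five reverse aliases,
    # mirroring the list slice shown in forward_email_to_mailbox above.
    def rewritten_reply_to_header(reply_to_contacts) -> str:
        return ", ".join(contact.new_addr() for contact in reply_to_contacts[:5])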
@@ -1040,6 +1089,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
 return False, status.E503

 user = alias.user
+mail_from = envelope.mail_from

 if not user.can_send_or_receive():
 LOG.i(f"User {user} cannot send emails")
@@ -1053,15 +1103,13 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
 return False, dmarc_delivery_status

 # Anti-spoofing
-mailbox = get_mailbox_for_reply_phase(
+mailbox = get_mailbox_from_mail_from(mail_from, alias)
-envelope.mail_from, get_header_unicode(msg[headers.FROM]), alias
-)
 if not mailbox:
 if alias.disable_email_spoofing_check:
 # ignore this error, use default alias mailbox
 LOG.w(
 "ignore unknown sender to reverse-alias %s: %s -> %s",
-envelope.mail_from,
+mail_from,
 alias,
 contact,
 )
@@ -1204,11 +1252,23 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):

 Session.commit()

-recipient_name = get_alias_recipient_name(alias)
+# make the email comes from alias
-if recipient_name.message:
+from_header = alias.email
-LOG.d(recipient_name.message)
+# add alias name from alias
-LOG.d("From header is %s", recipient_name.name)
+if alias.name:
-add_or_replace_header(msg, headers.FROM, recipient_name.name)
+LOG.d("Put alias name %s in from header", alias.name)
+from_header = sl_formataddr((alias.name, alias.email))
+elif alias.custom_domain:
+# add alias name from domain
+if alias.custom_domain.name:
+LOG.d(
+"Put domain default alias name %s in from header",
+alias.custom_domain.name,
+)
+from_header = sl_formataddr((alias.custom_domain.name, alias.email))
+
+LOG.d("From header is %s", from_header)
+add_or_replace_header(msg, headers.FROM, from_header)

 try:
 if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
@@ -1400,6 +1460,32 @@ def replace_original_message_id(alias: Alias, email_log: EmailLog, msg: Message)
 msg[headers.REFERENCES] = " ".join(new_message_ids)


+def get_mailbox_from_mail_from(mail_from: str, alias) -> Optional[Mailbox]:
+"""return the corresponding mailbox given the mail_from and alias
+Usually the mail_from=mailbox.email but it can also be one of the authorized address
+"""
+
+def __check(email_address: str, alias: Alias) -> Optional[Mailbox]:
+for mailbox in alias.mailboxes:
+if mailbox.email == email_address:
+return mailbox
+
+for authorized_address in mailbox.authorized_addresses:
+if authorized_address.email == email_address:
+LOG.d(
+"Found an authorized address for %s %s %s",
+alias,
+mailbox,
+authorized_address,
+)
+return mailbox
+return None
+
+# We need to first check for the uncanonicalized version because we still have users in the db with the
+# email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
+return __check(mail_from, alias) or __check(canonicalize_email(mail_from), alias)
+
+
 def handle_unknown_mailbox(
 envelope, msg, reply_email: str, user: User, alias: Alias, contact: Contact
 ):
@@ -1515,9 +1601,7 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
 LOG.w(
 f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
 )
-change_alias_status(
+change_alias_status(alias, enabled=False)
-alias, enabled=False, message=f"Set enabled=False due to {reason}"
-)

 Notification.create(
 user_id=user.id,
@@ -1669,7 +1753,7 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
 )
 Notification.create(
 user_id=user.id,
-title=f"Email cannot be sent to {contact.email} from your alias {alias.email}",
+title=f"Email cannot be sent to { contact.email } from your alias { alias.email }",
 message=Notification.render(
 "notification/bounce-reply-phase.html",
 alias=alias,
@@ -1682,7 +1766,7 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
 user,
 ALERT_BOUNCE_EMAIL_REPLY_PHASE,
 mailbox.email,
-f"Email cannot be sent to {contact.email} from your alias {alias.email}",
+f"Email cannot be sent to { contact.email } from your alias { alias.email }",
 render(
 "transactional/bounce/bounce-email-reply-phase.txt",
 user=user,
@@ -2362,7 +2446,6 @@ class MailHandler:
 "Custom/nb_rcpt_tos", len(envelope.rcpt_tos)
 )

-send_version_event("email_handler")
 with create_light_app().app_context():
 return_status = handle(envelope, msg)
 elapsed = time.time() - start
@@ -2398,7 +2481,6 @@ def main(port: int):

 controller.start()
 LOG.d("Start mail controller %s %s", controller.hostname, controller.port)
-send_version_event("email_handler")

 if LOAD_PGP_EMAIL_HANDLER:
 LOG.w("LOAD PGP keys")
@@ -4,13 +4,12 @@ from sys import argv, exit

 from app.config import EVENT_LISTENER_DB_URI
 from app.log import LOG
-from app.monitor_utils import send_version_event
 from events import event_debugger
 from events.runner import Runner
 from events.event_source import DeadLetterEventSource, PostgresEventSource
 from events.event_sink import ConsoleEventSink, HttpEventSink

-_DEFAULT_MAX_RETRIES = 10
+_DEFAULT_MAX_RETRIES = 100


 class Mode(Enum):
@@ -31,11 +30,9 @@ def main(mode: Mode, dry_run: bool, max_retries: int):
 if mode == Mode.DEAD_LETTER:
 LOG.i("Using DeadLetterEventSource")
 source = DeadLetterEventSource(max_retries)
-service_name = "event_listener_dead_letter"
 elif mode == Mode.LISTENER:
 LOG.i("Using PostgresEventSource")
 source = PostgresEventSource(EVENT_LISTENER_DB_URI)
-service_name = "event_listener"
 else:
 raise ValueError(f"Invalid mode: {mode}")

@@ -46,8 +43,7 @@ def main(mode: Mode, dry_run: bool, max_retries: int):
 LOG.i("Starting with HttpEventSink")
 sink = HttpEventSink()

-send_version_event(service_name)
+runner = Runner(source=source, sink=sink)
-runner = Runner(source=source, sink=sink, service_name=service_name)
 runner.run()

@@ -12,10 +12,6 @@ class EventSink(ABC):
 def process(self, event: SyncEvent) -> bool:
 pass

-@abstractmethod
-def send_data_to_webhook(self, data: bytes) -> bool:
-pass
-

 class HttpEventSink(EventSink):
 def process(self, event: SyncEvent) -> bool:
@@ -25,28 +21,20 @@ class HttpEventSink(EventSink):

 LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")

-if self.send_data_to_webhook(event.content):
-LOG.info(f"Event {event.id} sent successfully to webhook")
-return True
-
-return False
-
-def send_data_to_webhook(self, data: bytes) -> bool:
 res = requests.post(
 url=EVENT_WEBHOOK,
-data=data,
+data=event.content,
 headers={"Content-Type": "application/x-protobuf"},
 verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
 )
-newrelic.agent.record_custom_event(
+newrelic.agent.record_custom_event("event_sent", {"http_code": res.status_code})
-"EventSentToPartner", {"http_code": res.status_code}
-)
 if res.status_code != 200:
 LOG.warning(
 f"Failed to send event to webhook: {res.status_code} {res.text}"
 )
 return False
 else:
+LOG.info(f"Event {event.id} sent successfully to webhook")
 return True


@@ -54,7 +42,3 @@ class ConsoleEventSink(EventSink):
 def process(self, event: SyncEvent) -> bool:
 LOG.info(f"Handling event {event.id}")
 return True
-
-def send_data_to_webhook(self, data: bytes) -> bool:
-LOG.info(f"Sending {len(data)} bytes to webhook")
-return True
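A hedged usage sketch of the sink split on the main side: because send_data_to_webhook is declared on EventSink itself, callers can push raw protobuf bytes through either implementation, and ConsoleEventSink doubles as a dry-run stand-in for HttpEventSink. The deliver helper and the payload bytes below are made up for illustration:

    # Illustration only: both sinks expose send_data_to_webhook on the main branch.
    def deliver(sink, payload: bytes) -> bool:
        # HttpEventSink posts to EVENT_WEBHOOK; ConsoleEventSink just logs the size.
        return sink.send_data_to_webhook(payload)

    # deliver(ConsoleEventSink(), b"\x08\x01")  # dry run, always True
    # deliver(HttpEventSink(), b"\x08\x01")     # real webhook call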
@@ -72,9 +72,7 @@ class PostgresEventSource(EventSource):
 Session.close() # Ensure we get a new connection and we don't leave a dangling tx

 def __connect(self):
-self.__connection = psycopg2.connect(
+self.__connection = psycopg2.connect(self.__connection_string)
-self.__connection_string, application_name="sl-event-listen"
-)

 from app.db import Session

@@ -85,28 +83,24 @@ class DeadLetterEventSource(EventSource):
 def __init__(self, max_retries: int):
 self.__max_retries = max_retries

-def execute_loop(
+@newrelic.agent.background_task()
-self, on_event: Callable[[SyncEvent], NoReturn]
+def run(self, on_event: Callable[[SyncEvent], NoReturn]):
-) -> list[SyncEvent]:
+while True:
-threshold = arrow.utcnow().shift(minutes=-_DEAD_LETTER_THRESHOLD_MINUTES)
+try:
+threshold = arrow.utcnow().shift(
+minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
+)
 events = SyncEvent.get_dead_letter(
 older_than=threshold, max_retries=self.__max_retries
 )
 if events:
 LOG.info(f"Got {len(events)} dead letter events")
+if events:
 newrelic.agent.record_custom_metric(
 "Custom/dead_letter_events_to_process", len(events)
 )
 for event in events:
-if event.mark_as_taken(allow_taken_older_than=threshold):
 on_event(event)
-return events
-
-@newrelic.agent.background_task()
-def run(self, on_event: Callable[[SyncEvent], NoReturn]):
-while True:
-try:
-events = self.execute_loop(on_event)
 Session.close() # Ensure that we have a new connection and we don't have a dangling tx with a lock
 if not events:
 LOG.debug("No dead letter events")
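On the main side the dead-letter polling is split into execute_loop (one pass that returns the events it handed off) and run (the retry loop), which makes a single pass easy to drive directly. A rough sketch under that assumption; the list-append callback is just a stand-in for a real handler:

    # Sketch: drive one dead-letter pass without the while-True loop.
    handled = []
    source = DeadLetterEventSource(max_retries=10)
    events = source.execute_loop(on_event=handled.append)
    # only events that could be marked as taken reach the callback
    assert all(event in events for event in handled)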
@@ -4,24 +4,20 @@ import newrelic.agent
 from app.log import LOG
 from app.db import Session
 from app.models import SyncEvent
-from app.monitor_utils import send_version_event
 from events.event_sink import EventSink
 from events.event_source import EventSource


 class Runner:
-def __init__(self, source: EventSource, sink: EventSink, service_name: str = ""):
+def __init__(self, source: EventSource, sink: EventSink):
 self.__source = source
 self.__sink = sink
-self.__service_name = service_name

 def run(self):
 self.__source.run(self.__on_event)

 @newrelic.agent.background_task()
 def __on_event(self, event: SyncEvent):
-if self.__service_name:
-send_version_event(self.__service_name)
 try:
 event_created_at = event.created_at
 start_time = arrow.now()
@@ -19,7 +19,7 @@ URL=http://localhost:7777
 NOT_SEND_EMAIL=true

 # domain used to create alias
-EMAIL_DOMAIN=sl.lan
+EMAIL_DOMAIN=sl.local

 # Allow SimpleLogin to enforce SPF by using the extra headers from postfix
 # ENFORCE_SPF=true
@@ -37,18 +37,18 @@ EMAIL_DOMAIN=sl.lan
 # FIRST_ALIAS_DOMAIN = another-domain.com

 # transactional email is sent from this email address
-SUPPORT_EMAIL=support@sl.lan
+SUPPORT_EMAIL=support@sl.local
 SUPPORT_NAME=Son from SimpleLogin

 # To use VERP
 # prefix must end with + and suffix must start with +
 # BOUNCE_PREFIX = "bounces+"
-# BOUNCE_SUFFIX = "+@sl.lan"
+# BOUNCE_SUFFIX = "+@sl.local"
 # same as BOUNCE_PREFIX but used for reply phase. Note it doesn't have the plus sign (+) at the end.
 # BOUNCE_PREFIX_FOR_REPLY_PHASE = "bounce_reply"

 # to receive general stats.
-# ADMIN_EMAIL=admin@sl.lan
+# ADMIN_EMAIL=admin@sl.local

 # Max number emails user can generate for free plan
 # Set to 5 by default
@@ -6,7 +6,7 @@ from app.db import Session
 from app.log import LOG
 from app.models import Mailbox, Contact, SLDomain, Partner
 from app.pgp_utils import load_public_key
-from app.proton.proton_partner import PROTON_PARTNER_NAME
+from app.proton.utils import PROTON_PARTNER_NAME
 from server import create_light_app


@@ -56,15 +56,14 @@ def add_sl_domains():
 Session.commit()


-def add_proton_partner() -> Partner:
+def add_proton_partner():
 proton_partner = Partner.get_by(name=PROTON_PARTNER_NAME)
 if not proton_partner:
-proton_partner = Partner.create(
+Partner.create(
 name=PROTON_PARTNER_NAME,
 contact_email="simplelogin@protonmail.com",
 )
 Session.commit()
-return proton_partner


 if __name__ == "__main__":
@@ -2,18 +2,13 @@
 Run scheduled jobs.
 Not meant for running job at precise time (+- 1h)
 """

 import time
-from typing import List, Optional
+from typing import List

 import arrow
-import newrelic.agent
-from sqlalchemy.orm import Query
-from sqlalchemy.orm.exc import ObjectDeletedError
 from sqlalchemy.sql.expression import or_, and_

 from app import config
-from app.constants import JobType
 from app.db import Session
 from app.email_utils import (
 send_email,
@@ -23,16 +18,10 @@ from app.events.event_dispatcher import PostgresDispatcher
 from app.import_utils import handle_batch_import
 from app.jobs.event_jobs import send_alias_creation_events_for_user
 from app.jobs.export_user_data_job import ExportUserDataJob
-from app.jobs.send_event_job import SendEventToWebhookJob
 from app.log import LOG
 from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
-from app.monitor_utils import send_version_event
-from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
-from events.event_sink import HttpEventSink
 from server import create_light_app

-_MAX_JOBS_PER_BATCH = 50
-

 def onboarding_send_from_alias(user):
 comm_email, unsubscribe_link, via_email = user.get_communication_email()
@@ -139,7 +128,7 @@ def welcome_proton(user):

 def delete_mailbox_job(job: Job):
 mailbox_id = job.payload.get("mailbox_id")
-mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
+mailbox = Mailbox.get(mailbox_id)
 if not mailbox:
 return

@@ -163,18 +152,10 @@ def delete_mailbox_job(job: Job):

 mailbox_email = mailbox.email
 user = mailbox.user

-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.DeleteMailbox,
-message=f"Delete mailbox {mailbox.id} ({mailbox.email})",
-)
 Mailbox.delete(mailbox_id)
 Session.commit()
 LOG.d("Mailbox %s %s deleted", mailbox_id, mailbox_email)
-
-if not job.payload.get("send_mail", True):
-return
 if alias_transferred_to:
 send_email(
 user.email,
@@ -198,8 +179,7 @@ SimpleLogin team.


 def process_job(job: Job):
-send_version_event("job_runner")
+if job.name == config.JOB_ONBOARDING_1:
-if job.name == JobType.ONBOARDING_1.value:
 user_id = job.payload.get("user_id")
 user = User.get(user_id)

@@ -208,7 +188,7 @@ def process_job(job: Job):
 if user and user.notification and user.activated:
 LOG.d("send onboarding send-from-alias email to user %s", user)
 onboarding_send_from_alias(user)
-elif job.name == JobType.ONBOARDING_2.value:
+elif job.name == config.JOB_ONBOARDING_2:
 user_id = job.payload.get("user_id")
 user = User.get(user_id)

@@ -217,7 +197,7 @@ def process_job(job: Job):
 if user and user.notification and user.activated:
 LOG.d("send onboarding mailbox email to user %s", user)
 onboarding_mailbox(user)
-elif job.name == JobType.ONBOARDING_4.value:
+elif job.name == config.JOB_ONBOARDING_4:
 user_id = job.payload.get("user_id")
 user: User = User.get(user_id)

@@ -232,11 +212,11 @@ def process_job(job: Job):
 LOG.d("send onboarding pgp email to user %s", user)
 onboarding_pgp(user)

-elif job.name == JobType.BATCH_IMPORT.value:
+elif job.name == config.JOB_BATCH_IMPORT:
 batch_import_id = job.payload.get("batch_import_id")
 batch_import = BatchImport.get(batch_import_id)
 handle_batch_import(batch_import)
-elif job.name == JobType.DELETE_ACCOUNT.value:
+elif job.name == config.JOB_DELETE_ACCOUNT:
 user_id = job.payload.get("user_id")
 user = User.get(user_id)

@@ -255,57 +235,44 @@ def process_job(job: Job):
 )
 User.delete(user.id)
 Session.commit()
-elif job.name == JobType.DELETE_MAILBOX.value:
+elif job.name == config.JOB_DELETE_MAILBOX:
 delete_mailbox_job(job)

-elif job.name == JobType.DELETE_DOMAIN.value:
+elif job.name == config.JOB_DELETE_DOMAIN:
 custom_domain_id = job.payload.get("custom_domain_id")
-custom_domain: Optional[CustomDomain] = CustomDomain.get(custom_domain_id)
+custom_domain = CustomDomain.get(custom_domain_id)
 if not custom_domain:
 return

-is_subdomain = custom_domain.is_sl_subdomain
 domain_name = custom_domain.domain
 user = custom_domain.user

-custom_domain_partner_id = custom_domain.partner_id
 CustomDomain.delete(custom_domain.id)
 Session.commit()

-if is_subdomain:
-message = f"Delete subdomain {custom_domain_id} ({domain_name})"
-else:
-message = f"Delete custom domain {custom_domain_id} ({domain_name})"
-emit_user_audit_log(
-user=user,
-action=UserAuditLogAction.DeleteCustomDomain,
-message=message,
-)
-
 LOG.d("Domain %s deleted", domain_name)

-if custom_domain_partner_id is None:
 send_email(
 user.email,
 f"Your domain {domain_name} has been deleted",
 f"""Domain {domain_name} along with its aliases are deleted successfully.

 Regards,
 SimpleLogin team.
 """,
 retries=3,
 )
-elif job.name == JobType.SEND_USER_REPORT.value:
+elif job.name == config.JOB_SEND_USER_REPORT:
 export_job = ExportUserDataJob.create_from_job(job)
 if export_job:
 export_job.run()
-elif job.name == JobType.SEND_PROTON_WELCOME_1.value:
+elif job.name == config.JOB_SEND_PROTON_WELCOME_1:
 user_id = job.payload.get("user_id")
 user = User.get(user_id)
 if user and user.activated:
 LOG.d("Send proton welcome email to user %s", user)
 welcome_proton(user)
-elif job.name == JobType.SEND_ALIAS_CREATION_EVENTS.value:
+elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
 user_id = job.payload.get("user_id")
 user = User.get(user_id)
 if user and user.activated:
@@ -313,111 +280,48 @@ def process_job(job: Job):
 send_alias_creation_events_for_user(
 user, dispatcher=PostgresDispatcher.get()
 )
-elif job.name == JobType.SEND_EVENT_TO_WEBHOOK.value:
-send_job = SendEventToWebhookJob.create_from_job(job)
-if send_job:
-send_job.run(HttpEventSink())
 else:
 LOG.e("Unknown job name %s", job.name)


-def get_jobs_to_run_query(taken_before_time: arrow.Arrow) -> Query:
+def get_jobs_to_run() -> List[Job]:
 # Get jobs that match all conditions:
 # - Job.state == ready OR (Job.state == taken AND Job.taken_at < now - 30 mins AND Job.attempts < 5)
 # - Job.run_at is Null OR Job.run_at < now + 10 mins
+taken_at_earliest = arrow.now().shift(minutes=-config.JOB_TAKEN_RETRY_WAIT_MINS)
 run_at_earliest = arrow.now().shift(minutes=+10)
-return Job.filter(
+query = Job.filter(
 and_(
 or_(
 Job.state == JobState.ready.value,
 and_(
 Job.state == JobState.taken.value,
-Job.taken_at < taken_before_time,
+Job.taken_at < taken_at_earliest,
 Job.attempts < config.JOB_MAX_ATTEMPTS,
 ),
 ),
 or_(Job.run_at.is_(None), and_(Job.run_at <= run_at_earliest)),
 )
 )
+return query.all()

-def get_jobs_to_run(taken_before_time: arrow.Arrow) -> List[Job]:
-query = get_jobs_to_run_query(taken_before_time)
-return (
-query.order_by(Job.priority.desc())
-.order_by(Job.run_at.asc())
-.limit(_MAX_JOBS_PER_BATCH)
-.all()
-)
-
-
-def take_job(job: Job, taken_before_time: arrow.Arrow) -> bool:
-sql = """
-UPDATE job
-SET
-taken_at = :taken_time,
-attempts = attempts + 1,
-state = :taken_state
-WHERE id = :job_id
-AND (state = :ready_state OR (state=:taken_state AND taken_at < :taken_before_time))
-"""
-args = {
-"taken_time": arrow.now().datetime,
-"job_id": job.id,
-"ready_state": JobState.ready.value,
-"taken_state": JobState.taken.value,
-"taken_before_time": taken_before_time.datetime,
-}
-try:
-res = Session.execute(sql, args)
-Session.commit()
-except ObjectDeletedError:
-return False
-
-return res.rowcount > 0
-
-
 if __name__ == "__main__":
-send_version_event("job_runner")
 while True:
 # wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc
 with create_light_app().app_context():
-taken_before_time = arrow.now().shift(
+for job in get_jobs_to_run():
-minutes=-config.JOB_TAKEN_RETRY_WAIT_MINS
-)
-
-jobs_done = 0
-for job in get_jobs_to_run(taken_before_time):
-if not take_job(job, taken_before_time):
-continue
 LOG.d("Take job %s", job)

-try:
+# mark the job as taken, whether it will be executed successfully or not
-newrelic.agent.record_custom_event("ProcessJob", {"job": job.name})
+job.taken = True
+job.taken_at = arrow.now()
+job.state = JobState.taken.value
+job.attempts += 1
+Session.commit()
 process_job(job)
-job_result = "success"

 job.state = JobState.done.value
-jobs_done += 1
-except Exception as e:
-LOG.warn(f"Error processing job (id={job.id} name={job.name}): {e}")
-
-# Increment manually, as the attempts increment is done by the take_job but not
-# updated in our instance
-job_attempts = job.attempts + 1
-if job_attempts >= config.JOB_MAX_ATTEMPTS:
-LOG.warn(
-f"Marking job (id={job.id} name={job.name} attempts={job_attempts}) as ERROR"
-)
-job.state = JobState.error.value
-job_result = "error"
-else:
-job_result = "retry"
-
-newrelic.agent.record_custom_event(
-"JobProcessed", {"job": job.name, "result": job_result}
-)
 Session.commit()

-if jobs_done == 0:
 time.sleep(10)
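The removed take_job helper claims a job with a single conditional UPDATE, so when several job runners race for the same row only the one whose statement reports rowcount > 0 goes on to process it. A small illustrative sketch of that claim pattern (both calls use the helpers shown in the diff; the outcome values are examples, not guaranteed):

    # Two runners racing for the same job: the conditional UPDATE only succeeds once.
    taken_before_time = arrow.now().shift(minutes=-config.JOB_TAKEN_RETRY_WAIT_MINS)
    first_claim = take_job(job, taken_before_time)   # e.g. True: this runner won
    second_claim = take_job(job, taken_before_time)  # False: state is already taken and taken_at is fresh
    assert not (first_claim and second_claim)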
@@ -1,4 +1,6 @@
 abacus
+abdomen
+abdominal
 abide
 abiding
 ability
@@ -1029,6 +1031,7 @@ chosen
 chowder
 chowtime
 chrome
+chubby
 chuck
 chug
 chummy
@@ -2038,6 +2041,8 @@ dwindling
 dynamic
 dynamite
 dynasty
+dyslexia
+dyslexic
 each
 eagle
 earache
@@ -2076,6 +2081,7 @@ eatery
 eating
 eats
 ebay
+ebony
 ebook
 ecard
 eccentric
@@ -2369,6 +2375,8 @@ exclude
 excluding
 exclusion
 exclusive
+excretion
+excretory
 excursion
 excusable
 excusably
@@ -2388,6 +2396,8 @@ existing
 exit
 exodus
 exonerate
+exorcism
+exorcist
 expand
 expanse
 expansion
@@ -2473,6 +2483,7 @@ fanning
 fantasize
 fantastic
 fantasy
+fascism
 fastball
 faster
 fasting
@@ -3017,6 +3028,7 @@ guiding
 guileless
 guise
 gulf
+gullible
 gully
 gulp
 gumball
@@ -3028,6 +3040,10 @@ gurgle
 gurgling
 guru
 gush
+gusto
+gusty
+gutless
+guts
 gutter
 guy
 guzzler
@@ -3226,6 +3242,8 @@ humble
 humbling
 humbly
 humid
+humiliate
+humility
 humming
 hummus
 humongous
@@ -3253,6 +3271,7 @@ hurray
 hurricane
 hurried
 hurry
+hurt
 husband
 hush
 husked
@@ -3273,6 +3292,8 @@ hypnotic
 hypnotism
 hypnotist
 hypnotize
+hypocrisy
+hypocrite
 ibuprofen
 ice
 iciness
@@ -3302,6 +3323,7 @@ image
 imaginary
 imagines
 imaging
+imbecile
 imitate
 imitation
 immerse
@@ -3724,6 +3746,7 @@ machine
 machinist
 magazine
 magenta
+maggot
 magical
 magician
 magma
@@ -3945,6 +3968,8 @@ multitude
 mumble
 mumbling
 mumbo
+mummified
+mummify
 mumps
 munchkin
 mundane
@@ -3997,6 +4022,8 @@ napped
 napping
 nappy
 narrow
+nastily
+nastiness
 national
 native
 nativity
@@ -4419,6 +4446,7 @@ pasta
 pasted
 pastel
 pastime
+pastor
 pastrami
 pasture
 pasty
@@ -4430,6 +4458,7 @@ path
 patience
 patient
 patio
+patriarch
 patriot
 patrol
 patronage
@@ -4520,6 +4549,7 @@ pettiness
 petty
 petunia
 phantom
+phobia
 phoenix
 phonebook
 phoney
@@ -4578,6 +4608,7 @@ plot
 plow
 ploy
 pluck
+plug
 plunder
 plunging
 plural
@@ -4844,6 +4875,7 @@ pupil
 puppet
 puppy
 purchase
+pureblood
 purebred
 purely
 pureness
@@ -5015,6 +5047,7 @@ recharger
 recipient
 recital
 recite
+reckless
 reclaim
 recliner
 reclining
@@ -5407,6 +5440,7 @@ rubdown
 ruby
 ruckus
 rudder
+rug
 ruined
 rule
 rumble
@@ -5414,6 +5448,7 @@ rumbling
 rummage
 rumor
 runaround
+rundown
 runner
 running
 runny
@@ -5483,6 +5518,7 @@ sandpaper
 sandpit
 sandstone
 sandstorm
+sandworm
 sandy
 sanitary
 sanitizer
@@ -5505,6 +5541,7 @@ satisfy
 saturate
 saturday
 sauciness
+saucy
 sauna
 savage
 savanna
@@ -5515,6 +5552,7 @@ savor
 saxophone
 say
 scabbed
+scabby
 scalded
 scalding
 scale
@@ -5549,6 +5587,7 @@ science
 scientist
 scion
 scoff
+scolding
 scone
 scoop
 scooter
@@ -5612,6 +5651,8 @@ sedate
 sedation
 sedative
 sediment
+seduce
+seducing
 segment
 seismic
 seizing
@@ -5858,6 +5899,7 @@ skimpily
 skincare
 skinless
 skinning
+skinny
 skintight
 skipper
 skipping
@@ -6206,12 +6248,17 @@ stifle
 stifling
 stillness
 stilt
+stimulant
+stimulate
+stimuli
 stimulus
 stinger
 stingily
 stinging
 stingray
 stingy
+stinking
+stinky
 stipend
 stipulate
 stir
@@ -6819,6 +6866,7 @@ unbent
 unbiased
 unbitten
 unblended
unblended
|
||||||
|
unblessed
|
||||||
unblock
|
unblock
|
||||||
unbolted
|
unbolted
|
||||||
unbounded
|
unbounded
|
||||||
@ -6899,6 +6947,7 @@ undertone
|
|||||||
undertook
|
undertook
|
||||||
undertow
|
undertow
|
||||||
underuse
|
underuse
|
||||||
|
underwear
|
||||||
underwent
|
underwent
|
||||||
underwire
|
underwire
|
||||||
undesired
|
undesired
|
||||||
@ -6951,6 +7000,7 @@ unfunded
|
|||||||
unglazed
|
unglazed
|
||||||
ungloved
|
ungloved
|
||||||
unglue
|
unglue
|
||||||
|
ungodly
|
||||||
ungraded
|
ungraded
|
||||||
ungreased
|
ungreased
|
||||||
unguarded
|
unguarded
|
||||||
@ -6982,6 +7032,7 @@ uninsured
|
|||||||
uninvited
|
uninvited
|
||||||
union
|
union
|
||||||
uniquely
|
uniquely
|
||||||
|
unisexual
|
||||||
unison
|
unison
|
||||||
unissued
|
unissued
|
||||||
unit
|
unit
|
||||||
@ -7442,6 +7493,8 @@ wheat
|
|||||||
whenever
|
whenever
|
||||||
whiff
|
whiff
|
||||||
whimsical
|
whimsical
|
||||||
|
whinny
|
||||||
|
whiny
|
||||||
whisking
|
whisking
|
||||||
whoever
|
whoever
|
||||||
whole
|
whole
|
||||||
@ -7547,6 +7600,7 @@ wrongness
|
|||||||
wrought
|
wrought
|
||||||
xbox
|
xbox
|
||||||
xerox
|
xerox
|
||||||
|
yahoo
|
||||||
yam
|
yam
|
||||||
yanking
|
yanking
|
||||||
yapping
|
yapping
|
||||||
|
@@ -1,30 +0,0 @@
-"""Custom Domain partner id
-
-Revision ID: 2441b7ff5da9
-Revises: 1c14339aae90
-Create Date: 2024-09-13 15:43:02.425964
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '2441b7ff5da9'
-down_revision = '1c14339aae90'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('custom_domain', sa.Column('partner_id', sa.Integer(), nullable=True, default=None, server_default=None))
-    op.create_foreign_key(None, 'custom_domain', 'partner', ['partner_id'], ['id'])
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint(None, 'custom_domain', type_='foreignkey')
-    op.drop_column('custom_domain', 'partner_id')
-    # ### end Alembic commands ###
@@ -1,31 +0,0 @@
-"""contact.flags and custom_domain.pending_deletion
-
-Revision ID: 88dd7a0abf54
-Revises: 2441b7ff5da9
-Create Date: 2024-09-19 15:41:20.910374
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '88dd7a0abf54'
-down_revision = '2441b7ff5da9'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('contact', sa.Column('flags', sa.Integer(), server_default='0', nullable=False))
-    op.add_column('custom_domain', sa.Column('pending_deletion', sa.Boolean(), server_default='0', nullable=False))
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('custom_domain', 'pending_deletion')
-    op.drop_column('contact', 'flags')
-    # ### end Alembic commands ###
@@ -1,27 +0,0 @@
-"""custom domain indices
-
-Revision ID: 62afa3a10010
-Revises: 88dd7a0abf54
-Create Date: 2024-09-30 11:40:04.127791
-
-"""
-from alembic import op
-
-
-# revision identifiers, used by Alembic.
-revision = '62afa3a10010'
-down_revision = '88dd7a0abf54'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    with op.get_context().autocommit_block():
-        op.create_index('ix_custom_domain_pending_deletion', 'custom_domain', ['pending_deletion'], unique=False, postgresql_concurrently=True)
-        op.create_index('ix_custom_domain_user_id', 'custom_domain', ['user_id'], unique=False, postgresql_concurrently=True)
-
-
-def downgrade():
-    with op.get_context().autocommit_block():
-        op.drop_index('ix_custom_domain_user_id', table_name='custom_domain', postgresql_concurrently=True)
-        op.drop_index('ix_custom_domain_pending_deletion', table_name='custom_domain', postgresql_concurrently=True)
@@ -1,45 +0,0 @@
-"""alias_audit_log
-
-Revision ID: 91ed7f46dc81
-Revises: 62afa3a10010
-Create Date: 2024-10-11 13:22:11.594054
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '91ed7f46dc81'
-down_revision = '62afa3a10010'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('alias_audit_log',
-    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-    sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
-    sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
-    sa.Column('user_id', sa.Integer(), nullable=False),
-    sa.Column('alias_id', sa.Integer(), nullable=False),
-    sa.Column('alias_email', sa.String(length=255), nullable=False),
-    sa.Column('action', sa.String(length=255), nullable=False),
-    sa.Column('message', sa.Text(), nullable=True),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('ix_alias_audit_log_alias_email', 'alias_audit_log', ['alias_email'], unique=False)
-    op.create_index('ix_alias_audit_log_alias_id', 'alias_audit_log', ['alias_id'], unique=False)
-    op.create_index('ix_alias_audit_log_user_id', 'alias_audit_log', ['user_id'], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('ix_alias_audit_log_user_id', table_name='alias_audit_log')
-    op.drop_index('ix_alias_audit_log_alias_id', table_name='alias_audit_log')
-    op.drop_index('ix_alias_audit_log_alias_email', table_name='alias_audit_log')
-    op.drop_table('alias_audit_log')
-    # ### end Alembic commands ###
Some files were not shown because too many files have changed in this diff.