Compare commits

15 commits:

89fad50529
d09b3b992c
ef9c09f76e
0fa4b1b7ee
2904d04a2c
a5801551d0
9c2a35193c
e47e5a5255
ed37325b32
dd6005ffdf
664cd32f81
33f0eb6c41
9fd2fa9a78
3c77f8af4b
545eeda79b
app/.github/workflows/main.yml (56 changes, vendored)
@@ -1,6 +1,12 @@
-name: Test and lint
+name: SimpleLogin actions

-on: [push, pull_request]
+on:
+  push:
+    branches:
+      - master
+    tags:
+      - v*
+  pull_request:

 jobs:
   lint:
@@ -9,35 +15,29 @@ jobs:
       - name: Check out repo
         uses: actions/checkout@v3

-      - name: Install poetry
-        run: pipx install poetry
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5

-      - uses: actions/setup-python@v4
         with:
-          python-version: '3.10'
-          cache: 'poetry'
+          # Install a specific version of uv.
+          version: "0.5.21"
+          enable-cache: true

       - name: Install OS dependencies
-        if: ${{ matrix.python-version }} == '3.10'
         run: |
           sudo apt update
           sudo apt install -y libre2-dev libpq-dev

       - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction
+        if: steps.setup-uv.outputs.cache-hit != 'true'
+        run: uv sync --locked --all-extras

       - name: Check formatting & linting
         run: |
-          poetry run pre-commit run --all-files
+          uv run pre-commit run --all-files


   test:
     runs-on: ubuntu-latest
-    strategy:
-      max-parallel: 4
-      matrix:
-        python-version: ["3.10"]

     # service containers to run with `postgres-job`
     services:
@@ -69,23 +69,21 @@ jobs:
       - name: Check out repo
         uses: actions/checkout@v3

-      - name: Install poetry
-        run: pipx install poetry
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5

-      - uses: actions/setup-python@v4
         with:
-          python-version: ${{ matrix.python-version }}
-          cache: 'poetry'
+          # Install a specific version of uv.
+          version: "0.5.21"
+          enable-cache: true

       - name: Install OS dependencies
-        if: ${{ matrix.python-version }} == '3.10'
         run: |
           sudo apt update
           sudo apt install -y libre2-dev libpq-dev

       - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction
+        if: steps.setup-uv.outputs.cache-hit != 'true'
+        run: uv sync --locked --all-extras


       - name: Start Redis v6
@@ -95,16 +93,16 @@ jobs:

       - name: Run db migration
         run: |
-          CONFIG=tests/test.env poetry run alembic upgrade head
+          CONFIG=tests/test.env uv run alembic upgrade head

       - name: Prepare version file
         run: |
-          scripts/generate-build-info.sh ${{ github.sha }}
+          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
           cat app/build_info.py

       - name: Test with pytest
         run: |
-          poetry run pytest
+          uv run pytest
         env:
           GITHUB_ACTIONS_TEST: true

@@ -156,7 +154,7 @@ jobs:

       - name: Prepare version file
         run: |
-          scripts/generate-build-info.sh ${{ github.sha }}
+          scripts/generate-build-info.sh ${{ github.sha }} ${{ github.ref_name }}
           cat app/build_info.py

       - name: Build image and publish to Docker Registry
app/.python-version (new file, 1 line)

@@ -0,0 +1 @@
+3.12.8
@@ -20,7 +20,7 @@ SimpleLogin backend consists of 2 main components:
 ## Install dependencies

 The project requires:
-- Python 3.10 and poetry to manage dependencies
+- Python 3.10 and uv to manage dependencies
 - Node v10 for front-end.
 - Postgres 13+

@@ -28,7 +28,7 @@ First, install all dependencies by running the following command.
 Feel free to use `virtualenv` or similar tools to isolate development environment.

 ```bash
-poetry sync
+uv sync
 ```

 On Mac, sometimes you might need to install some other packages via `brew`:
@@ -55,7 +55,7 @@ brew install -s re2 pybind11
 We use pre-commit to run all our linting and static analysis checks. Please run

 ```bash
-poetry run pre-commit install
+uv run pre-commit install
 ```

 To install it in your development environment.
@@ -160,25 +160,25 @@ Here are the small sum-ups of the directory structures and their roles:
 The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run

 ```
-poetry run ruff format .
+uv run ruff format .
 ```

 The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

 ```bash
-poetry run flake8
+uv run flake8
 ```

 For HTML templates, we use `djlint`. Before creating a pull request, please run

 ```bash
-poetry run djlint --check templates
+uv run djlint --check templates
 ```

 If some files aren't properly formatted, you can format all files with

 ```bash
-poetry run djlint --reformat .
+uv run djlint --reformat .
 ```

 ## Test sending email
@@ -215,7 +215,7 @@ python email_handler.py
 4) Send a test email

 ```bash
-swaks --to e1@sl.local --from hey@google.com --server 127.0.0.1:20381
+swaks --to e1@sl.lan --from hey@google.com --server 127.0.0.1:20381
 ```

 Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you should see the forwarded email.
@@ -239,15 +239,15 @@ brew install python3.10
 # make sure to update the PATH so python, pip point to Python3
 # for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile

-# Although pipx is the recommended way to install poetry,
+# Although pipx is the recommended way to install uv,
 # install pipx via brew will automatically install python 3.12
-# and poetry will then use python 3.12
-# so we recommend using poetry this way instead
-curl -sSL https://install.python-poetry.org | python3 -
+# and uv will then use python 3.12
+# so we recommend using uv this way instead
+curl -sSL https://install.python-uv.org | python3 -

-poetry install
+uv install

 # activate the virtualenv and you should be good to go!
 source .venv/bin/activate

 ```
@@ -4,43 +4,47 @@ WORKDIR /code
 COPY ./static/package*.json /code/static/
 RUN cd /code/static && npm ci

-# Main image
-FROM python:3.10
+FROM --platform=linux/amd64 ubuntu:22.04
+
+ARG UV_VERSION="0.5.21"
+ARG UV_HASH="e108c300eafae22ad8e6d94519605530f18f8762eb58d2b98a617edfb5d088fc"

 # Keeps Python from generating .pyc files in the container
-ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONDONTWRITEBYTECODE=1
 # Turns off buffering for easier container logging
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONUNBUFFERED=1

-# Add poetry to PATH
-ENV PATH="${PATH}:/root/.local/bin"

 WORKDIR /code

-# Copy poetry files
-COPY poetry.lock pyproject.toml ./
+# Copy dependency files
+COPY pyproject.toml uv.lock .python-version ./

-# Install and setup poetry
-RUN pip install -U pip \
-    && apt-get update \
-    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
-    && curl -sSL https://install.python-poetry.org | python3 - \
-    # Remove curl and netcat from the image
-    && apt-get purge -y curl netcat-traditional \
-    # Run poetry
-    && poetry config virtualenvs.create false \
-    && poetry install --no-interaction --no-ansi --no-root \
-    # Clear apt cache \
-    && apt-get purge -y libre2-dev cmake ninja-build\
+# Install deps
+RUN apt-get update \
+    && apt-get install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev build-essential pkg-config cmake ninja-build bash clang \
+    && curl -sSL "https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-x86_64-unknown-linux-gnu.tar.gz" > uv.tar.gz \
+    && echo "${UV_HASH} uv.tar.gz" | sha256sum -c - \
+    && tar xf uv.tar.gz -C /tmp/ \
+    && mv /tmp/uv-x86_64-unknown-linux-gnu/uv /usr/bin/uv \
+    && mv /tmp/uv-x86_64-unknown-linux-gnu/uvx /usr/bin/uvx \
+    && rm -rf /tmp/uv* \
+    && rm -f uv.tar.gz \
+    && uv python install `cat .python-version` \
+    && uv sync --locked \
+    && apt-get autoremove -y \
+    && apt-get purge -y curl netcat-traditional build-essential pkg-config cmake ninja-build python3-dev clang\
+    && apt-get autoremove -y \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

+# Copy code
+COPY . .
+
 # copy npm packages
 COPY --from=npm /code /code

-# copy everything else into /code
-COPY . .
+ENV PATH="/code/.venv/bin:$PATH"

 EXPOSE 7777

 #gunicorn wsgi:app -b 0.0.0.0:7777 -w 2 --timeout 15 --log-level DEBUG
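The new Dockerfile pins uv to an exact version and SHA-256 checksum and verifies the tarball before installing it. Below is a minimal Python sketch (not part of the changeset) of that same verify-before-use idea; only the version and hash are taken from the Dockerfile above, everything else is illustrative.

```python
# Illustrative only: mirror the Dockerfile's download-then-sha256sum check.
import hashlib
import urllib.request

UV_VERSION = "0.5.21"
UV_HASH = "e108c300eafae22ad8e6d94519605530f18f8762eb58d2b98a617edfb5d088fc"
URL = (
    "https://github.com/astral-sh/uv/releases/download/"
    f"{UV_VERSION}/uv-x86_64-unknown-linux-gnu.tar.gz"
)


def download_and_verify(url: str = URL, expected_sha256: str = UV_HASH) -> bytes:
    """Download the archive and refuse to use it if the checksum differs."""
    data = urllib.request.urlopen(url).read()
    digest = hashlib.sha256(data).hexdigest()
    if digest != expected_sha256:
        raise RuntimeError(f"checksum mismatch: {digest}")
    return data
```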
@@ -3,7 +3,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional

-import arrow
+import sqlalchemy.exc
 from arrow import Arrow
 from newrelic import agent
 from psycopg2.errors import UniqueViolation
@@ -35,6 +35,7 @@ from app.utils import random_string
 class SLPlanType(Enum):
     Free = 1
     Premium = 2
+    PremiumLifetime = 3


 @dataclass
@@ -58,23 +59,26 @@ class LinkResult:
     strategy: str


-def send_user_plan_changed_event(partner_user: PartnerUser) -> Optional[int]:
+def send_user_plan_changed_event(
+    partner_user: PartnerUser,
+) -> UserPlanChanged:
     subscription_end = partner_user.user.get_active_subscription_end(
         include_partner_subscription=False
     )
-    end_timestamp = None
     if partner_user.user.lifetime:
-        end_timestamp = arrow.get("2038-01-01").timestamp
+        event = UserPlanChanged(lifetime=True)
     elif subscription_end:
-        end_timestamp = subscription_end.timestamp
-    event = UserPlanChanged(plan_end_time=end_timestamp)
+        event = UserPlanChanged(plan_end_time=subscription_end.timestamp)
+    else:
+        event = UserPlanChanged(plan_end_time=None)
     EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
     Session.flush()
-    return end_timestamp
+    return event


 def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
     sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
+    is_lifetime = plan.type == SLPlanType.PremiumLifetime
     if plan.type == SLPlanType.Free:
         if sub is not None:
             LOG.i(
@@ -83,25 +87,30 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
             PartnerSubscription.delete(sub.id)
             agent.record_custom_event("PlanChange", {"plan": "free"})
     else:
+        end_time = plan.expiration
+        if plan.type == SLPlanType.PremiumLifetime:
+            end_time = None
         if sub is None:
             LOG.i(
-                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
+                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] with {end_time} / {is_lifetime}"
             )
             create_partner_subscription(
                 partner_user=partner_user,
-                expiration=plan.expiration,
+                expiration=end_time,
+                lifetime=is_lifetime,
                 msg="Upgraded via partner. User did not have a previous partner subscription",
             )
             agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
         else:
-            if sub.end_at != plan.expiration:
-                LOG.i(
-                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
-                )
+            if sub.end_at != plan.expiration or sub.lifetime != is_lifetime:
                 agent.record_custom_event(
                     "PlanChange", {"plan": "premium", "type": "extension"}
                 )
-                sub.end_at = plan.expiration
+                sub.end_at = plan.expiration if not is_lifetime else None
+                sub.lifetime = is_lifetime
+                LOG.i(
+                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] to {sub.end_at} / {sub.lifetime} "
+                )
                 emit_user_audit_log(
                     user=partner_user.user,
                     action=UserAuditLogAction.SubscriptionExtended,
@@ -185,7 +194,9 @@ class NewUserStrategy(ClientMergeStrategy):
                 user=new_user,
                 strategy=self.__class__.__name__,
             )
-        except UniqueViolation:
+        except (UniqueViolation, sqlalchemy.exc.IntegrityError) as e:
+            Session.rollback()
+            LOG.debug(f"Got the duplicate user error: {e}")
             return self.create_missing_link(canonical_email)

     def create_missing_link(self, canonical_email: str):
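After this change, send_user_plan_changed_event returns a UserPlanChanged event instead of a raw timestamp, with lifetime plans flagged explicitly. The sketch below is not from the diff; UserPlanChanged here is a stand-in dataclass for the generated protobuf message so the branching can be run in isolation.

```python
# Stand-alone sketch of the new event-building logic (stand-in types only).
from dataclasses import dataclass
from typing import Optional


@dataclass
class UserPlanChanged:
    plan_end_time: Optional[int] = None
    lifetime: bool = False


def build_plan_changed_event(lifetime: bool, subscription_end_ts: Optional[int]) -> UserPlanChanged:
    # Mirrors the new branching: lifetime wins, then a dated end, else an empty event.
    if lifetime:
        return UserPlanChanged(lifetime=True)
    if subscription_end_ts:
        return UserPlanChanged(plan_end_time=subscription_end_ts)
    return UserPlanChanged(plan_end_time=None)


print(build_plan_changed_event(True, None))         # lifetime plan
print(build_plan_changed_event(False, 1767225600))  # dated subscription
```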
@@ -1,21 +1,27 @@
 from __future__ import annotations

 from typing import Optional, List

 import arrow
 import sqlalchemy
-from flask_admin import BaseView
-from flask_admin.form import SecureForm
-from flask_admin.model.template import EndpointLinkRowAction
-from markupsafe import Markup

-from app import models, s3
 from flask import redirect, url_for, request, flash, Response
+from flask_admin import BaseView
 from flask_admin import expose, AdminIndexView
 from flask_admin.actions import action
 from flask_admin.contrib import sqla
+from flask_admin.form import SecureForm
+from flask_admin.model.template import EndpointLinkRowAction
 from flask_login import current_user
+from markupsafe import Markup

+from app import models, s3, config
+from app.custom_domain_validation import (
+    CustomDomainValidation,
+    DomainValidationResult,
+    ExpectedValidationRecords,
+)
 from app.db import Session
+from app.dns_utils import get_network_dns_client
 from app.events.event_dispatcher import EventDispatcher
 from app.events.generated.event_pb2 import EventContent, UserPlanChanged
 from app.models import (
@@ -39,8 +45,10 @@ from app.models import (
     AliasMailbox,
     AliasAuditLog,
     UserAuditLog,
+    CustomDomain,
 )
 from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
+from app.proton.proton_unlink import perform_proton_account_unlink
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


@@ -118,7 +126,7 @@ class SLAdminIndexView(AdminIndexView):
         if not current_user.is_authenticated or not current_user.is_admin:
             return redirect(url_for("auth.login", next=request.url))

-        return redirect("/admin/email_search")
+        return redirect(url_for("admin.email_search.index"))


 class UserAdmin(SLModelView):
@@ -773,21 +781,22 @@ class InvalidMailboxDomainAdmin(SLModelView):


 class EmailSearchResult:
-    no_match: bool = True
-    alias: Optional[Alias] = None
-    alias_audit_log: Optional[List[AliasAuditLog]] = None
-    mailbox: List[Mailbox] = []
-    mailbox_count: int = 0
-    deleted_alias: Optional[DeletedAlias] = None
-    deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
-    domain_deleted_alias: Optional[DomainDeletedAlias] = None
-    domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
-    user: Optional[User] = None
-    user_audit_log: Optional[List[UserAuditLog]] = None
-    query: str
+    def __init__(self):
+        self.no_match: bool = True
+        self.alias: Optional[Alias] = None
+        self.alias_audit_log: Optional[List[AliasAuditLog]] = None
+        self.mailbox: List[Mailbox] = []
+        self.mailbox_count: int = 0
+        self.deleted_alias: Optional[DeletedAlias] = None
+        self.deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
+        self.domain_deleted_alias: Optional[DomainDeletedAlias] = None
+        self.domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
+        self.user: Optional[User] = None
+        self.user_audit_log: Optional[List[UserAuditLog]] = None
+        self.query: str

     @staticmethod
-    def from_email(email: str) -> EmailSearchResult:
+    def from_request_email(email: str) -> EmailSearchResult:
         output = EmailSearchResult()
         output.query = email
         alias = Alias.get_by(email=email)
@@ -799,7 +808,11 @@ class EmailSearchResult:
             .all()
         )
         output.no_match = False
-        user = User.get_by(email=email)
+        try:
+            user_id = int(email)
+            user = User.get(user_id)
+        except ValueError:
+            user = User.get_by(email=email)
         if user:
             output.user = user
             output.user_audit_log = (
@@ -905,10 +918,10 @@ class EmailSearchAdmin(BaseView):
     @expose("/", methods=["GET", "POST"])
     def index(self):
         search = EmailSearchResult()
-        email = request.args.get("email")
+        email = request.args.get("query")
         if email is not None and len(email) > 0:
             email = email.strip()
-            search = EmailSearchResult.from_email(email)
+            search = EmailSearchResult.from_request_email(email)

         return self.render(
             "admin/email_search.html",
@@ -916,3 +929,135 @@ class EmailSearchAdmin(BaseView):
             data=search,
             helper=EmailSearchHelpers,
         )

+    @expose("/partner_unlink", methods=["POST"])
+    def delete_partner_link(self):
+        user_id = request.form.get("user_id")
+        if not user_id:
+            flash("Missing user_id", "error")
+            return redirect(url_for("admin.email_search.index"))
+        try:
+            user_id = int(user_id)
+        except ValueError:
+            flash("Missing user_id", "error")
+            return redirect(url_for("admin.email_search.index", query=user_id))
+        user = User.get(user_id)
+        if user is None:
+            flash("User not found", "error")
+            return redirect(url_for("admin.email_search.index", query=user_id))
+        external_user_id = perform_proton_account_unlink(user, skip_check=True)
+        if not external_user_id:
+            flash("User unlinked", "success")
+            return redirect(url_for("admin.email_search.index", query=user_id))
+
+        AdminAuditLog.create(
+            admin_user_id=user.id,
+            model=User.__class__.__name__,
+            model_id=user.id,
+            action=AuditLogActionEnum.unlink_user.value,
+            data={"external_user_id": external_user_id},
+        )
+        Session.commit()
+
+        return redirect(url_for("admin.email_search.index", query=user_id))
+
+
+class CustomDomainWithValidationData:
+    def __init__(self, domain: CustomDomain):
+        self.domain: CustomDomain = domain
+        self.ownership_expected: Optional[ExpectedValidationRecords] = None
+        self.ownership_validation: Optional[DomainValidationResult] = None
+        self.mx_expected: Optional[dict[int, ExpectedValidationRecords]] = None
+        self.mx_validation: Optional[DomainValidationResult] = None
+        self.spf_expected: Optional[ExpectedValidationRecords] = None
+        self.spf_validation: Optional[DomainValidationResult] = None
+        self.dkim_expected: {str: ExpectedValidationRecords} = {}
+        self.dkim_validation: {str: str} = {}
+
+
+class CustomDomainSearchResult:
+    def __init__(self):
+        self.no_match: bool = False
+        self.user: Optional[User] = None
+        self.domains: list[CustomDomainWithValidationData] = []
+
+    @staticmethod
+    def from_user(user: Optional[User]) -> CustomDomainSearchResult:
+        out = CustomDomainSearchResult()
+        if user is None:
+            out.no_match = True
+            return out
+        out.user = user
+        dns_client = get_network_dns_client()
+        validator = CustomDomainValidation(
+            dkim_domain=config.EMAIL_DOMAIN,
+            partner_domains=config.PARTNER_DNS_CUSTOM_DOMAINS,
+            partner_domains_validation_prefixes=config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES,
+            dns_client=dns_client,
+        )
+        for custom_domain in user.custom_domains:
+            validation_data = CustomDomainWithValidationData(custom_domain)
+            if not custom_domain.ownership_verified:
+                validation_data.ownership_expected = (
+                    validator.get_ownership_verification_record(custom_domain)
+                )
+                validation_data.ownership_validation = (
+                    validator.validate_domain_ownership(custom_domain)
+                )
+            if not custom_domain.verified:
+                validation_data.mx_expected = validator.get_expected_mx_records(
+                    custom_domain
+                )
+                validation_data.mx_validation = validator.validate_mx_records(
+                    custom_domain
+                )
+            if not custom_domain.spf_verified:
+                validation_data.spf_expected = validator.get_expected_spf_record(
+                    custom_domain
+                )
+                validation_data.spf_validation = validator.validate_spf_records(
+                    custom_domain
+                )
+            if not custom_domain.dkim_verified:
+                validation_data.dkim_expected = validator.get_dkim_records(
+                    custom_domain
+                )
+                validation_data.dkim_validation = validator.validate_dkim_records(
+                    custom_domain
+                )
+            out.domains.append(validation_data)
+
+        return out
+
+
+class CustomDomainSearchAdmin(BaseView):
+    def is_accessible(self):
+        return current_user.is_authenticated and current_user.is_admin
+
+    def inaccessible_callback(self, name, **kwargs):
+        # redirect to login page if user doesn't have access
+        flash("You don't have access to the admin page", "error")
+        return redirect(url_for("dashboard.index", next=request.url))
+
+    @expose("/", methods=["GET", "POST"])
+    def index(self):
+        query = request.args.get("user")
+        if query is None:
+            search = CustomDomainSearchResult()
+        else:
+            try:
+                user_id = int(query)
+                user = User.get_by(id=user_id)
+            except ValueError:
+                user = User.get_by(email=query)
+            if user is None:
+                cd = CustomDomain.get_by(domain=query)
+                if cd is not None:
+                    user = cd.user
+            search = CustomDomainSearchResult.from_user(user)

+        return self.render(
+            "admin/custom_domain_search.html",
+            data=search,
+            query=query,
+        )
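One reason for rewriting EmailSearchResult with attributes set in __init__: a class-level mutable default such as `mailbox: List[Mailbox] = []` is shared by every instance, so results could leak between searches. The toy classes below are illustration only, not code from the diff.

```python
# Why the __init__ rewrite matters: class-level lists are shared, instance
# attributes are not.
class SharedDefault:
    mailbox = []           # one list shared by every instance


class PerInstance:
    def __init__(self):
        self.mailbox = []  # a fresh list per instance


a, b = SharedDefault(), SharedDefault()
a.mailbox.append("m1@example.com")
print(b.mailbox)           # ['m1@example.com'] - leaked into the other result

c, d = PerInstance(), PerInstance()
c.mailbox.append("m1@example.com")
print(d.mailbox)           # [] - isolated, as the new code intends
```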
@@ -36,6 +36,7 @@ def set_mailboxes_for_alias(
             Mailbox.user_id == user_id,
             Mailbox.verified == True,  # noqa: E712
         )
+        .order_by(Mailbox.id.asc())
         .all()
     )
     if len(mailboxes) != len(mailbox_ids):
@@ -191,15 +191,8 @@ def get_alias_infos_with_pagination_v3(
         q = q.order_by(Alias.email.desc())
     else:
         # default sorting
-        latest_activity = case(
-            [
-                (Alias.created_at > EmailLog.created_at, Alias.created_at),
-                (Alias.created_at < EmailLog.created_at, EmailLog.created_at),
-            ],
-            else_=Alias.created_at,
-        )
         q = q.order_by(Alias.pinned.desc())
-        q = q.order_by(latest_activity.desc())
+        q = q.order_by(func.greatest(Alias.created_at, EmailLog.created_at).desc())

     q = q.limit(page_limit).offset(page_id * page_size)

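The default ordering above swaps the hand-written case() expression for func.greatest over the two timestamps. A small SQLAlchemy Core sketch (SQLAlchemy 1.4+ style, toy tables that stand in for Alias and EmailLog) shows the resulting ORDER BY clause; it is an illustration, not the project's query.

```python
# Illustration only: build and print the new ordering expression.
from sqlalchemy import Column, DateTime, MetaData, Table, func, select

md = MetaData()
alias = Table("alias", md, Column("created_at", DateTime))
email_log = Table("email_log", md, Column("created_at", DateTime))

latest_activity = func.greatest(alias.c.created_at, email_log.c.created_at)
query = select(alias.c.created_at).order_by(latest_activity.desc())

# ... ORDER BY greatest(alias.created_at, email_log.created_at) DESC
print(query)
```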
@@ -299,7 +299,10 @@ def update_alias(alias_id):
         changed = True

     if "mailbox_ids" in data:
-        mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
+        try:
+            mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
+        except ValueError:
+            return jsonify(error="Invalid mailbox_id"), 400
         err = set_mailboxes_for_alias(
             user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
         )
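The guard above turns a malformed mailbox_ids payload into a 400 response instead of an unhandled exception. The same check in isolation, with a helper name invented for the illustration:

```python
# Hypothetical helper mirroring the guard added to update_alias.
from typing import List, Optional


def parse_mailbox_ids(raw) -> Optional[List[int]]:
    try:
        return [int(m_id) for m_id in raw]
    except (TypeError, ValueError):
        return None  # caller turns this into a 400 response


print(parse_mailbox_ids(["1", "2", 3]))   # [1, 2, 3]
print(parse_mailbox_ids(["1", "oops"]))   # None
```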
@@ -1,7 +1,6 @@
 import secrets
 import string

-import facebook
 import google.oauth2.credentials
 import googleapiclient.discovery
 from flask import jsonify, request
@@ -261,6 +260,8 @@ def auth_facebook():
         }

     """
+    import facebook
+
     data = request.get_json()
     if not data:
         return jsonify(error="request body cannot be empty"), 400
@@ -6,12 +6,7 @@ from flask import request

 from app import mailbox_utils
 from app.api.base import api_bp, require_api_auth
-from app.dashboard.views.mailbox_detail import verify_mailbox_change
 from app.db import Session
-from app.email_utils import (
-    mailbox_already_used,
-    email_can_be_used_as_mailbox,
-)
 from app.models import Mailbox
 from app.utils import sanitize_email

@@ -122,20 +117,10 @@ def update_mailbox(mailbox_id):

     if "email" in data:
         new_email = sanitize_email(data.get("email"))

-        if mailbox_already_used(new_email, user):
-            return jsonify(error=f"{new_email} already used"), 400
-        elif not email_can_be_used_as_mailbox(new_email):
-            return (
-                jsonify(
-                    error=f"{new_email} cannot be used. Please note a mailbox cannot "
-                    f"be a disposable email address"
-                ),
-                400,
-            )

         try:
-            verify_mailbox_change(user, mailbox, new_email)
+            mailbox_utils.request_mailbox_email_change(user, mailbox, new_email)
+        except mailbox_utils.MailboxError as e:
+            return jsonify(error=e.msg), 400
         except SMTPRecipientsRefused:
             return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
         else:
@@ -145,7 +130,7 @@ def update_mailbox(mailbox_id):
     if "cancel_email_change" in data:
         cancel_email_change = data.get("cancel_email_change")
         if cancel_email_change:
-            mailbox.new_email = None
+            mailbox_utils.cancel_email_change(mailbox.id, user)
             changed = True

     if changed:
@@ -1,3 +1,4 @@
+from email_validator import EmailNotValidError
 from flask import g
 from flask import jsonify, request

@@ -61,8 +62,17 @@ def new_custom_alias_v2():
     if not data:
         return jsonify(error="request body cannot be empty"), 400

-    alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
-    signed_suffix = data.get("signed_suffix", "").strip()
+    alias_prefix = data.get("alias_prefix", "")
+    if not isinstance(alias_prefix, str) or not alias_prefix:
+        return jsonify(error="invalid value for alias_prefix"), 400
+
+    alias_prefix = alias_prefix.strip().lower().replace(" ", "")
+    signed_suffix = data.get("signed_suffix", "")
+    if not isinstance(signed_suffix, str) or not signed_suffix:
+        return jsonify(error="invalid value for signed_suffix"), 400
+
+    signed_suffix = signed_suffix.strip()
+
     note = data.get("note")
     alias_prefix = convert_to_id(alias_prefix)

@@ -93,12 +103,15 @@ def new_custom_alias_v2():
             400,
         )

-    alias = Alias.create(
-        user_id=user.id,
-        email=full_alias,
-        mailbox_id=user.default_mailbox_id,
-        note=note,
-    )
+    try:
+        alias = Alias.create(
+            user_id=user.id,
+            email=full_alias,
+            mailbox_id=user.default_mailbox_id,
+            note=note,
+        )
+    except EmailNotValidError:
+        return jsonify(error="Email is not valid"), 400
+
     Session.commit()

@@ -154,8 +167,16 @@ def new_custom_alias_v3():
         return jsonify(error="request body does not follow the required format"), 400

     alias_prefix_data = data.get("alias_prefix", "") or ""
+
+    if not isinstance(alias_prefix_data, str):
+        return jsonify(error="request body does not follow the required format"), 400
+
     alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
     signed_suffix = data.get("signed_suffix", "") or ""
+
+    if not isinstance(signed_suffix, str):
+        return jsonify(error="request body does not follow the required format"), 400
+
     signed_suffix = signed_suffix.strip()

     mailbox_ids = data.get("mailbox_ids")
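Both alias-creation endpoints now confirm that alias_prefix and signed_suffix are non-empty strings before calling string methods on them. The stand-alone sketch below captures that shared check; the helper name is invented for illustration and does not appear in the diff.

```python
# Hypothetical helper showing the validate-then-normalise step the endpoints
# now perform on alias_prefix.
from typing import Optional


def clean_prefix(value) -> Optional[str]:
    if not isinstance(value, str) or not value:
        return None  # the endpoint answers 400 in this case
    return value.strip().lower().replace(" ", "")


print(clean_prefix("  My Alias "))        # "myalias"
print(clean_prefix(["not", "a", "str"]))  # None
```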
@@ -12,7 +12,7 @@ from app.models import (
     SenderFormatEnum,
     AliasSuffixEnum,
 )
-from app.proton.utils import perform_proton_account_unlink
+from app.proton.proton_unlink import perform_proton_account_unlink


 def setting_to_dict(user: User):
@@ -144,5 +144,6 @@ def get_available_domains_for_random_alias_v2():
 @require_api_auth
 def unlink_proton_account():
     user = g.user
-    perform_proton_account_unlink(user)
+    if not perform_proton_account_unlink(user):
+        return jsonify(error="The account cannot be unlinked"), 400
     return jsonify({"ok": True})
@@ -2,7 +2,7 @@ from flask import jsonify, g
 from sqlalchemy_utils.types.arrow import arrow

 from app.api.base import api_bp, require_api_sudo, require_api_auth
-from app import config
+from app.constants import JobType
 from app.extensions import limiter
 from app.log import LOG
 from app.models import Job, ApiToCookieToken
@@ -24,7 +24,7 @@ def delete_user():
     )
     LOG.w("schedule delete account job for %s", g.user)
     Job.create(
-        name=config.JOB_DELETE_ACCOUNT,
+        name=JobType.DELETE_ACCOUNT.value,
         payload={"user_id": g.user.id},
         run_at=arrow.now(),
         commit=True,
@@ -44,6 +44,8 @@ def get_api_session_token():
         token: "asdli3ldq39h9hd3",
     }
     """
+    if not g.api_key:
+        return jsonify(ok=False), 401
     token = ApiToCookieToken.create(
         user=g.user,
         api_key_id=g.api_key.id,
@@ -12,7 +12,7 @@ from app.dashboard.views.index import get_stats
 from app.db import Session
 from app.image_validation import detect_image_format, ImageFormat
 from app.models import ApiKey, File, PartnerUser, User
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from app.session import logout_session
 from app.utils import random_string

@@ -23,7 +23,7 @@ from app.proton.proton_callback_handler import (
     ProtonCallbackHandler,
     Action,
 )
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from app.utils import sanitize_next_url, sanitize_scheme

 _authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"
@@ -1,2 +1,3 @@
 SHA1 = "dev"
 BUILD_TIME = "1652365083"
+VERSION = SHA1
@@ -62,6 +62,17 @@ def get_env_dict(env_var: str) -> dict[str, str]:
     return result


+def get_env_csv(env_var: str, default: Optional[str]) -> list[str]:
+    """
+    Get an env variable and convert it into a list of strings separated by,
+    Syntax is: val1,val2
+    """
+    value = os.getenv(env_var, default)
+    if not value:
+        return []
+    return [field.strip() for field in value.split(",") if field.strip()]
+
+
 config_file = os.environ.get("CONFIG")
 if config_file:
     config_file = get_abs_path(config_file)
@@ -171,6 +182,14 @@ FIRST_ALIAS_DOMAIN = os.environ.get("FIRST_ALIAS_DOMAIN") or EMAIL_DOMAIN
 # e.g. [(10, "mx1.hostname."), (10, "mx2.hostname.")]
 EMAIL_SERVERS_WITH_PRIORITY = sl_getenv("EMAIL_SERVERS_WITH_PRIORITY")

+PROTON_MX_SERVERS = get_env_csv(
+    "PROTON_MX_SERVERS", "mail.protonmail.ch., mailsec.protonmail.ch."
+)
+
+PROTON_EMAIL_DOMAINS = get_env_csv(
+    "PROTON_EMAIL_DOMAINS", "proton.me, protonmail.com, protonmail.ch, proton.ch, pm.me"
+)
+
 # disable the alias suffix, i.e. the ".random_word" part
 DISABLE_ALIAS_SUFFIX = "DISABLE_ALIAS_SUFFIX" in os.environ

@@ -297,20 +316,6 @@ MFA_USER_ID = "mfa_user_id"
 FLASK_PROFILER_PATH = os.environ.get("FLASK_PROFILER_PATH")
 FLASK_PROFILER_PASSWORD = os.environ.get("FLASK_PROFILER_PASSWORD")

-# Job names
-JOB_ONBOARDING_1 = "onboarding-1"
-JOB_ONBOARDING_2 = "onboarding-2"
-JOB_ONBOARDING_3 = "onboarding-3"
-JOB_ONBOARDING_4 = "onboarding-4"
-JOB_BATCH_IMPORT = "batch-import"
-JOB_DELETE_ACCOUNT = "delete-account"
-JOB_DELETE_MAILBOX = "delete-mailbox"
-JOB_DELETE_DOMAIN = "delete-domain"
-JOB_SEND_USER_REPORT = "send-user-report"
-JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
-JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
-JOB_SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"

 # for pagination
 PAGE_LIMIT = 20

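The new get_env_csv helper turns a comma-separated environment variable into a trimmed list, falling back to a default string; it backs the PROTON_MX_SERVERS and PROTON_EMAIL_DOMAINS settings above. Below is a self-contained copy so its behaviour can be exercised outside app.config.

```python
# Verbatim copy of the helper from the diff, plus a tiny usage check.
import os
from typing import Optional


def get_env_csv(env_var: str, default: Optional[str]) -> list[str]:
    value = os.getenv(env_var, default)
    if not value:
        return []
    return [field.strip() for field in value.split(",") if field.strip()]


os.environ["PROTON_MX_SERVERS"] = "mail.protonmail.ch., mailsec.protonmail.ch."
print(get_env_csv("PROTON_MX_SERVERS", None))
# ['mail.protonmail.ch.', 'mailsec.protonmail.ch.']
print(get_env_csv("UNSET_VAR", "a, b , ,c"))  # ['a', 'b', 'c']
```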
@@ -1,2 +1,18 @@
+import enum
+
 HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
 DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
+
+
+class JobType(enum.Enum):
+    ONBOARDING_1 = "onboarding-1"
+    ONBOARDING_2 = "onboarding-2"
+    ONBOARDING_4 = "onboarding-4"
+    BATCH_IMPORT = "batch-import"
+    DELETE_ACCOUNT = "delete-account"
+    DELETE_MAILBOX = "delete-mailbox"
+    DELETE_DOMAIN = "delete-domain"
+    SEND_USER_REPORT = "send-user-report"
+    SEND_PROTON_WELCOME_1 = "proton-welcome-1"
+    SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
+    SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"
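Job names move from loose string constants in config to the JobType enum in constants, and call sites pass the member's .value. A minimal sketch of that call-site pattern; create_job below is a stand-in for the app's Job.create, not its real signature.

```python
# Illustration of using JobType.*.value where a plain string was used before.
import enum


class JobType(enum.Enum):
    DELETE_ACCOUNT = "delete-account"
    DELETE_DOMAIN = "delete-domain"


def create_job(name: str, payload: dict) -> dict:
    # stand-in for Job.create(...); only the name argument matters here
    return {"name": name, "payload": payload}


job = create_job(name=JobType.DELETE_ACCOUNT.value, payload={"user_id": 42})
print(job)  # {'name': 'delete-account', 'payload': {'user_id': 42}}
```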
app/app/coupon_utils.py (new file, 149 lines)

@@ -0,0 +1,149 @@
+from typing import Optional
+
+import arrow
+from sqlalchemy import or_, update, and_
+
+from app.config import ADMIN_EMAIL
+from app.db import Session
+from app.email_utils import send_email
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import EventContent, UserPlanChanged
+from app.log import LOG
+from app.models import (
+    User,
+    ManualSubscription,
+    Coupon,
+    LifetimeCoupon,
+    PartnerSubscription,
+    PartnerUser,
+)
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
+
+
+class CouponUserCannotRedeemError(Exception):
+    pass
+
+
+def redeem_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
+    if user.lifetime:
+        LOG.i(f"User {user} is a lifetime SL user. Cannot redeem coupons")
+        raise CouponUserCannotRedeemError()
+
+    sub = user.get_active_subscription()
+    if sub and not isinstance(sub, ManualSubscription):
+        LOG.i(
+            f"User {user} has an active subscription that is not manual. Cannot redeem coupon {coupon_code}"
+        )
+        raise CouponUserCannotRedeemError()
+
+    coupon = Coupon.get_by(code=coupon_code)
+    if not coupon:
+        LOG.i(f"User is trying to redeem coupon {coupon_code} that does not exist")
+        return None
+
+    now = arrow.utcnow()
+    stmt = (
+        update(Coupon)
+        .where(
+            and_(
+                Coupon.code == coupon_code,
+                Coupon.used == False,  # noqa: E712
+                or_(
+                    Coupon.expires_date == None,  # noqa: E711
+                    Coupon.expires_date > now,
+                ),
+            )
+        )
+        .values(used=True, used_by_user_id=user.id, updated_at=now)
+    )
+    res = Session.execute(stmt)
+    if res.rowcount == 0:
+        LOG.i(f"Coupon {coupon.id} could not be redeemed. It's expired or invalid.")
+        return None
+
+    LOG.i(
+        f"Redeemed normal coupon {coupon.id} for {coupon.nb_year} years by user {user}"
+    )
+    if sub:
+        # renew existing subscription
+        if sub.end_at > arrow.now():
+            sub.end_at = sub.end_at.shift(years=coupon.nb_year)
+        else:
+            sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
+    else:
+        # There may be an expired manual subscription
+        sub = ManualSubscription.get_by(user_id=user.id)
+        end_at = arrow.now().shift(years=coupon.nb_year, days=1)
+        if sub:
+            sub.end_at = end_at
+        else:
+            sub = ManualSubscription.create(
+                user_id=user.id,
+                end_at=end_at,
+                comment="using coupon code",
+                is_giveaway=coupon.is_giveaway,
+            )
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.Upgrade,
+        message=f"User {user} redeemed coupon {coupon.id} for {coupon.nb_year} years",
+    )
+    EventDispatcher.send_event(
+        user=user,
+        content=EventContent(
+            user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
+        ),
+    )
+    Session.commit()
+    return coupon
+
+
+def redeem_lifetime_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
+    if user.lifetime:
+        return None
+    partner_sub = (
+        Session.query(PartnerSubscription)
+        .join(PartnerUser, PartnerUser.id == PartnerSubscription.partner_user_id)
+        .filter(PartnerUser.user_id == user.id, PartnerSubscription.lifetime == True)  # noqa: E712
+        .first()
+    )
+    if partner_sub is not None:
+        return None
+    coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=coupon_code)
+    if not coupon:
+        return None
+
+    stmt = (
+        update(LifetimeCoupon)
+        .where(
+            and_(
+                LifetimeCoupon.code == coupon_code,
+                LifetimeCoupon.nb_used > 0,
+            )
+        )
+        .values(nb_used=LifetimeCoupon.nb_used - 1)
+    )
+    res = Session.execute(stmt)
+    if res.rowcount == 0:
+        LOG.i("Coupon could not be redeemed")
+        return None
+
+    user.lifetime = True
+    user.lifetime_coupon_id = coupon.id
+    if coupon.paid:
+        user.paid_lifetime = True
+    EventDispatcher.send_event(
+        user=user,
+        content=EventContent(user_plan_change=UserPlanChanged(lifetime=True)),
+    )
+    Session.commit()
+
+    # notify admin
+    send_email(
+        ADMIN_EMAIL,
+        subject=f"User {user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
+        plaintext="",
+        html="",
+    )
+
+    return coupon
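redeem_coupon claims the coupon with a single conditional UPDATE and checks rowcount, so two concurrent redemptions of the same code cannot both succeed. The demonstration below shows that claim-by-update pattern on a throwaway in-memory SQLite table; the schema and values are invented for the demo and are not the app's models.

```python
# Illustration of the conditional-UPDATE pattern used in redeem_coupon.
from sqlalchemy import Boolean, Column, Integer, String, create_engine, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class DemoCoupon(Base):
    __tablename__ = "coupon"
    id = Column(Integer, primary_key=True)
    code = Column(String, unique=True)
    used = Column(Boolean, default=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(DemoCoupon(code="WELCOME"))
    session.commit()

    stmt = (
        update(DemoCoupon)
        .where(DemoCoupon.code == "WELCOME", DemoCoupon.used == False)  # noqa: E712
        .values(used=True)
    )
    print(session.execute(stmt).rowcount)  # 1 -> this caller claimed the coupon
    print(session.execute(stmt).rowcount)  # 0 -> a second attempt is rejected
```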
@@ -5,7 +5,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import List, Optional

-from app.config import JOB_DELETE_DOMAIN
+from app.constants import JobType
 from app.db import Session
 from app.email_utils import get_email_domain_part
 from app.log import LOG
@@ -156,7 +156,7 @@ def delete_custom_domain(domain: CustomDomain):
     LOG.w("schedule delete domain job for %s", domain)
     domain.pending_deletion = True
     Job.create(
-        name=JOB_DELETE_DOMAIN,
+        name=JobType.DELETE_DOMAIN.value,
         payload={"custom_domain_id": domain.id},
         run_at=arrow.now(),
         commit=True,
@ -5,9 +5,7 @@ from app import config
|
|||||||
from app.constants import DMARC_RECORD
|
from app.constants import DMARC_RECORD
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.dns_utils import (
|
from app.dns_utils import (
|
||||||
MxRecord,
|
|
||||||
DNSClient,
|
DNSClient,
|
||||||
is_mx_equivalent,
|
|
||||||
get_network_dns_client,
|
get_network_dns_client,
|
||||||
)
|
)
|
||||||
from app.models import CustomDomain
|
from app.models import CustomDomain
|
||||||
@ -21,6 +19,39 @@ class DomainValidationResult:
|
|||||||
errors: [str]
|
errors: [str]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ExpectedValidationRecords:
|
||||||
|
recommended: str
|
||||||
|
allowed: list[str]
|
||||||
|
|
||||||
|
|
||||||
|
def is_mx_equivalent(
|
||||||
|
mx_domains: dict[int, list[str]],
|
||||||
|
expected_mx_domains: dict[int, ExpectedValidationRecords],
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Compare mx_domains with ref_mx_domains to see if they are equivalent.
|
||||||
|
mx_domains and ref_mx_domains are list of (priority, domain)
|
||||||
|
|
||||||
|
The priority order is taken into account but not the priority number.
|
||||||
|
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
|
||||||
|
"""
|
||||||
|
|
||||||
|
expected_prios = []
|
||||||
|
for prio in expected_mx_domains:
|
||||||
|
expected_prios.append(prio)
|
||||||
|
|
||||||
|
if len(expected_prios) != len(mx_domains):
|
||||||
|
return False
|
||||||
|
|
||||||
|
for prio_position, prio_value in enumerate(sorted(mx_domains.keys())):
|
||||||
|
for domain in mx_domains[prio_value]:
|
||||||
|
if domain not in expected_mx_domains[expected_prios[prio_position]].allowed:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
class CustomDomainValidation:
|
class CustomDomainValidation:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -37,59 +68,88 @@ class CustomDomainValidation:
             or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
         )

-    def get_ownership_verification_record(self, domain: CustomDomain) -> str:
-        prefix = "sl"
+    def get_ownership_verification_record(
+        self, domain: CustomDomain
+    ) -> ExpectedValidationRecords:
+        prefixes = ["sl"]
         if (
             domain.partner_id is not None
             and domain.partner_id in self._partner_domain_validation_prefixes
         ):
-            prefix = self._partner_domain_validation_prefixes[domain.partner_id]
+            prefixes.insert(
+                0, self._partner_domain_validation_prefixes[domain.partner_id]
+            )

         if not domain.ownership_txt_token:
             domain.ownership_txt_token = random_string(30)
             Session.commit()

-        return f"{prefix}-verification={domain.ownership_txt_token}"
+        valid = [
+            f"{prefix}-verification={domain.ownership_txt_token}" for prefix in prefixes
+        ]
+        return ExpectedValidationRecords(recommended=valid[0], allowed=valid)

-    def get_expected_mx_records(self, domain: CustomDomain) -> list[MxRecord]:
-        records = []
+    def get_expected_mx_records(
+        self, domain: CustomDomain
+    ) -> dict[int, ExpectedValidationRecords]:
+        records = {}
         if domain.partner_id is not None and domain.partner_id in self._partner_domains:
             domain = self._partner_domains[domain.partner_id]
-            records.append(MxRecord(10, f"mx1.{domain}."))
-            records.append(MxRecord(20, f"mx2.{domain}."))
-        else:
-            # Default ones
-            for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
-                records.append(MxRecord(priority, domain))
+            records[10] = [f"mx1.{domain}."]
+            records[20] = [f"mx2.{domain}."]
+        # Default ones
+        for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
+            if priority not in records:
+                records[priority] = []
+            records[priority].append(domain)

-        return records
+        return {
+            priority: ExpectedValidationRecords(
+                recommended=records[priority][0], allowed=records[priority]
+            )
+            for priority in records
+        }

-    def get_expected_spf_domain(self, domain: CustomDomain) -> str:
+    def get_expected_spf_domain(
+        self, domain: CustomDomain
+    ) -> ExpectedValidationRecords:
+        records = []
         if domain.partner_id is not None and domain.partner_id in self._partner_domains:
-            return self._partner_domains[domain.partner_id]
+            records.append(self._partner_domains[domain.partner_id])
         else:
-            return config.EMAIL_DOMAIN
+            records.append(config.EMAIL_DOMAIN)
+        return ExpectedValidationRecords(recommended=records[0], allowed=records)

     def get_expected_spf_record(self, domain: CustomDomain) -> str:
         spf_domain = self.get_expected_spf_domain(domain)
-        return f"v=spf1 include:{spf_domain} ~all"
+        return f"v=spf1 include:{spf_domain.recommended} ~all"

-    def get_dkim_records(self, domain: CustomDomain) -> {str: str}:
+    def get_dkim_records(
+        self, domain: CustomDomain
+    ) -> {str: ExpectedValidationRecords}:
         """
         Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
         it will return the default ones or the partner ones.
         """

         # By default use the default domain
-        dkim_domain = self.dkim_domain
+        dkim_domains = [self.dkim_domain]
         if domain.partner_id is not None:
-            # Domain is from a partner. Retrieve the partner config and use that domain if exists
-            dkim_domain = self._partner_domains.get(domain.partner_id, dkim_domain)
+            # Domain is from a partner. Retrieve the partner config and use that domain as preferred if it exists
+            partner_domain = self._partner_domains.get(domain.partner_id, None)
+            if partner_domain is not None:
+                dkim_domains.insert(0, partner_domain)

-        return {
-            f"{key}._domainkey": f"{key}._domainkey.{dkim_domain}"
-            for key in ("dkim", "dkim02", "dkim03")
-        }
+        output = {}
+        for key in ("dkim", "dkim02", "dkim03"):
+            records = [
+                f"{key}._domainkey.{dkim_domain}" for dkim_domain in dkim_domains
+            ]
+            output[f"{key}._domainkey"] = ExpectedValidationRecords(
+                recommended=records[0], allowed=records
+            )
+
+        return output

     def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
         """
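Editor's note: every getter above now returns an ExpectedValidationRecords (or a dict of them) instead of a single string. `recommended` is the one value shown on the DNS setup page, while `allowed` lists every value validation will accept, so a partner-created domain can pass with either the partner's records or SimpleLogin's defaults. A hedged usage sketch; `validator`, `domain` and `dns_client` are placeholders, not names from the diff:

ownership = validator.get_ownership_verification_record(domain)
print("Add this TXT record:", ownership.recommended)

txt_records = dns_client.get_txt_record(domain.domain)
is_verified = any(record in txt_records for record in ownership.allowed)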
@@ -102,7 +162,7 @@ class CustomDomainValidation:
         for prefix, expected_record in expected_records.items():
             custom_record = f"{prefix}.{custom_domain.domain}"
             dkim_record = self._dns_client.get_cname_record(custom_record)
-            if dkim_record == expected_record:
+            if dkim_record in expected_record.allowed:
                 correct_records[prefix] = custom_record
             else:
                 invalid_records[custom_record] = dkim_record or "empty"
@@ -138,11 +198,15 @@ class CustomDomainValidation:
         Check if the custom_domain has added the ownership verification records
         """
         txt_records = self._dns_client.get_txt_record(custom_domain.domain)
-        expected_verification_record = self.get_ownership_verification_record(
+        expected_verification_records = self.get_ownership_verification_record(
             custom_domain
         )
-        if expected_verification_record in txt_records:
+        found = False
+        for verification_record in expected_verification_records.allowed:
+            if verification_record in txt_records:
+                found = True
+                break
+        if found:
             custom_domain.ownership_verified = True
             emit_user_audit_log(
                 user=custom_domain.user,
@@ -161,10 +225,11 @@ class CustomDomainValidation:
         expected_mx_records = self.get_expected_mx_records(custom_domain)

         if not is_mx_equivalent(mx_domains, expected_mx_records):
-            return DomainValidationResult(
-                success=False,
-                errors=[f"{record.priority} {record.domain}" for record in mx_domains],
-            )
+            errors = []
+            for prio in mx_domains:
+                for mx_domain in mx_domains[prio]:
+                    errors.append(f"{prio} {mx_domain}")
+            return DomainValidationResult(success=False, errors=errors)
         else:
             custom_domain.verified = True
             emit_user_audit_log(
@@ -180,7 +245,7 @@ class CustomDomainValidation:
     ) -> DomainValidationResult:
         spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
         expected_spf_domain = self.get_expected_spf_domain(custom_domain)
-        if expected_spf_domain in spf_domains:
+        if len(set(expected_spf_domain.allowed).intersection(set(spf_domains))) > 0:
             custom_domain.spf_verified = True
             emit_user_audit_log(
                 user=custom_domain.user,
|
|||||||
self, txt_records: List[str], custom_domain: CustomDomain
|
self, txt_records: List[str], custom_domain: CustomDomain
|
||||||
) -> List[str]:
|
) -> List[str]:
|
||||||
final_records = []
|
final_records = []
|
||||||
verification_record = self.get_ownership_verification_record(custom_domain)
|
verification_records = self.get_ownership_verification_record(custom_domain)
|
||||||
for record in txt_records:
|
for record in txt_records:
|
||||||
if record != verification_record:
|
if record not in verification_records.allowed:
|
||||||
final_records.append(record)
|
final_records.append(record)
|
||||||
return final_records
|
return final_records
|
||||||
|
@@ -39,7 +39,7 @@ from app.models import (
     SenderFormatEnum,
     UnsubscribeBehaviourEnum,
 )
-from app.proton.utils import perform_proton_account_unlink
+from app.proton.proton_unlink import perform_proton_account_unlink
 from app.utils import (
     random_string,
     CSRFValidationForm,
@@ -239,6 +239,8 @@ def unlink_proton_account():
         flash("Invalid request", "warning")
         return redirect(url_for("dashboard.setting"))

-    perform_proton_account_unlink(current_user)
-    flash("Your Proton account has been unlinked", "success")
+    if not perform_proton_account_unlink(current_user):
+        flash("Account cannot be unlinked", "warning")
+    else:
+        flash("Your Proton account has been unlinked", "success")
     return redirect(url_for("dashboard.setting"))
@@ -3,7 +3,7 @@ from flask import render_template, flash, request, redirect, url_for
 from flask_login import login_required, current_user

 from app import s3
-from app.config import JOB_BATCH_IMPORT
+from app.constants import JobType
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
@@ -64,7 +64,7 @@ def batch_import_route():

     # Schedule batch import job
     Job.create(
-        name=JOB_BATCH_IMPORT,
+        name=JobType.BATCH_IMPORT.value,
         payload={"batch_import_id": bi.id},
         run_at=arrow.now(),
     )
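Editor's note: this and several later hunks replace the string constants from app.config (JOB_BATCH_IMPORT, JOB_DELETE_ACCOUNT, JOB_SEND_USER_REPORT, JOB_SEND_EVENT_TO_WEBHOOK) with a JobType enum from app.constants. A hedged sketch of what such an enum might look like — the member names come from the call sites in this diff, the string values are placeholders, and the real module may differ:

from enum import Enum

class JobType(Enum):
    BATCH_IMPORT = "batch-import"
    DELETE_ACCOUNT = "delete-account"
    SEND_USER_REPORT = "send-user-report"
    SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"

# Call sites keep storing plain strings in the Job table, e.g.:
# Job.create(name=JobType.BATCH_IMPORT.value, ...)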
@@ -1,17 +1,15 @@
 import arrow
-from flask import render_template, flash, redirect, url_for, request
+from flask import render_template, flash, redirect, url_for
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators

 from app import parallel_limiter
 from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
+from app.coupon_utils import redeem_coupon, CouponUserCannotRedeemError
 from app.dashboard.base import dashboard_bp
-from app.db import Session
 from app.log import LOG
 from app.models import (
-    ManualSubscription,
-    Coupon,
     Subscription,
     AppleSubscription,
     CoinbaseSubscription,
@@ -58,56 +56,23 @@ def coupon_route():

     if coupon_form.validate_on_submit():
         code = coupon_form.code.data
-
-        coupon: Coupon = Coupon.get_by(code=code)
-        if coupon and not coupon.used:
-            if coupon.expires_date and coupon.expires_date < arrow.now():
-                flash(
-                    f"The coupon was expired on {coupon.expires_date.humanize()}",
-                    "error",
-                )
-                return redirect(request.url)
-
-            updated = (
-                Session.query(Coupon)
-                .filter_by(code=code, used=False)
-                .update({"used_by_user_id": current_user.id, "used": True})
-            )
-            if updated != 1:
-                flash("Coupon is not valid", "error")
-                return redirect(request.url)
-
-            manual_sub: ManualSubscription = ManualSubscription.get_by(
-                user_id=current_user.id
-            )
-            if manual_sub:
-                # renew existing subscription
-                if manual_sub.end_at > arrow.now():
-                    manual_sub.end_at = manual_sub.end_at.shift(years=coupon.nb_year)
-                else:
-                    manual_sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
-                Session.commit()
-                flash(
-                    f"Your current subscription is extended to {manual_sub.end_at.humanize()}",
-                    "success",
-                )
-            else:
-                ManualSubscription.create(
-                    user_id=current_user.id,
-                    end_at=arrow.now().shift(years=coupon.nb_year, days=1),
-                    comment="using coupon code",
-                    is_giveaway=coupon.is_giveaway,
-                    commit=True,
-                )
+        try:
+            coupon = redeem_coupon(code, current_user)
+            if coupon:
                 flash(
                     "Your account has been upgraded to Premium, thanks for your support!",
                     "success",
                 )
-
-            return redirect(url_for("dashboard.index"))
-
-        else:
-            flash(f"Code *{code}* expired or invalid", "warning")
+            else:
+                flash(
+                    "This coupon cannot be redeemed. It's invalid or has expired",
+                    "warning",
+                )
+        except CouponUserCannotRedeemError:
+            flash(
+                "You have an active subscription. Please remove it before redeeming a coupon",
+                "warning",
+            )

     return render_template(
         "dashboard/coupon.html",
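Editor's note: the view now delegates all coupon logic to app.coupon_utils. The contract it relies on, inferred only from the call sites above (the signatures are assumptions, not copied from the real module): redeem_coupon(code, user) returns the redeemed coupon or a falsy value when the code is invalid or expired, and raises CouponUserCannotRedeemError when the user already has an active subscription. For example:

try:
    coupon = redeem_coupon(code, current_user)
except CouponUserCannotRedeemError:
    ...  # user already has a subscription; nothing is redeemed
else:
    if not coupon:
        ...  # invalid or expired code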
@@ -3,7 +3,7 @@ from flask import flash, redirect, url_for, request, render_template
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm

-from app.config import JOB_DELETE_ACCOUNT
+from app.constants import JobType
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.log import LOG
@@ -40,7 +40,7 @@ def delete_account():
         message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
     )
     Job.create(
-        name=JOB_DELETE_ACCOUNT,
+        name=JobType.DELETE_ACCOUNT.value,
         payload={"user_id": current_user.id},
         run_at=arrow.now(),
         commit=True,
|
@ -5,8 +5,8 @@ from flask_login import login_required, current_user
|
|||||||
from flask_wtf import FlaskForm
|
from flask_wtf import FlaskForm
|
||||||
from wtforms import StringField, validators, IntegerField
|
from wtforms import StringField, validators, IntegerField
|
||||||
|
|
||||||
from app.constants import DMARC_RECORD
|
|
||||||
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
|
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
|
||||||
|
from app.constants import DMARC_RECORD
|
||||||
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
|
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
|
||||||
from app.custom_domain_validation import CustomDomainValidation
|
from app.custom_domain_validation import CustomDomainValidation
|
||||||
from app.dashboard.base import dashboard_bp
|
from app.dashboard.base import dashboard_bp
|
||||||
@ -137,7 +137,7 @@ def domain_detail_dns(custom_domain_id):
|
|||||||
return render_template(
|
return render_template(
|
||||||
"dashboard/domain_detail/dns.html",
|
"dashboard/domain_detail/dns.html",
|
||||||
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
|
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
|
||||||
ownership_record=domain_validator.get_ownership_verification_record(
|
ownership_records=domain_validator.get_ownership_verification_record(
|
||||||
custom_domain
|
custom_domain
|
||||||
),
|
),
|
||||||
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
|
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
|
||||||
|
@@ -11,7 +11,7 @@ from app.dashboard.base import dashboard_bp
 from app.extensions import limiter
 from app.log import LOG
 from app.models import PartnerUser, SocialAuth
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from app.utils import sanitize_next_url

 _SUDO_GAP = 120
@@ -1,16 +1,11 @@
-import arrow
 from flask import render_template, flash, redirect, url_for
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators

-from app.config import ADMIN_EMAIL
+from app import parallel_limiter
+from app.coupon_utils import redeem_lifetime_coupon
 from app.dashboard.base import dashboard_bp
-from app.db import Session
-from app.email_utils import send_email
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import UserPlanChanged, EventContent
-from app.models import LifetimeCoupon


 class CouponForm(FlaskForm):
@@ -19,6 +14,7 @@ class CouponForm(FlaskForm):

 @dashboard_bp.route("/lifetime_licence", methods=["GET", "POST"])
 @login_required
+@parallel_limiter.lock()
 def lifetime_licence():
     if current_user.lifetime:
         flash("You already have a lifetime licence", "warning")
@@ -35,36 +31,12 @@ def lifetime_licence():

     if coupon_form.validate_on_submit():
         code = coupon_form.code.data
-
-        coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=code)
-        if coupon and coupon.nb_used > 0:
-            coupon.nb_used -= 1
-            current_user.lifetime = True
-            current_user.lifetime_coupon_id = coupon.id
-            if coupon.paid:
-                current_user.paid_lifetime = True
-            EventDispatcher.send_event(
-                user=current_user,
-                content=EventContent(
-                    user_plan_change=UserPlanChanged(
-                        plan_end_time=arrow.get("2038-01-01").timestamp
-                    )
-                ),
-            )
-            Session.commit()
-
-            # notify admin
-            send_email(
-                ADMIN_EMAIL,
-                subject=f"User {current_user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
-                plaintext="",
-                html="",
-            )
-
+        coupon = redeem_lifetime_coupon(code, current_user)
+        if coupon:
             flash("You are upgraded to lifetime premium!", "success")
             return redirect(url_for("dashboard.index"))

         else:
-            flash(f"Code *{code}* expired or invalid", "warning")
+            flash("Coupon code expired or invalid", "warning")

     return render_template("dashboard/lifetime_licence.html", coupon_form=coupon_form)
@@ -1,23 +1,23 @@
-from smtplib import SMTPRecipientsRefused
-
 from email_validator import validate_email, EmailNotValidError
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from itsdangerous import TimestampSigner
 from wtforms import validators
-from wtforms.fields.html5 import EmailField
+from wtforms.fields.simple import StringField

+from app import mailbox_utils
 from app.config import ENFORCE_SPF, MAILBOX_SECRET
-from app.config import URL
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
-from app.email_utils import email_can_be_used_as_mailbox
-from app.email_utils import mailbox_already_used, render, send_email
 from app.extensions import limiter
-from app.mailbox_utils import perform_mailbox_email_change, MailboxEmailChangeError
-from app.models import Alias, AuthorizedAddress
+from app.mailbox_utils import (
+    perform_mailbox_email_change,
+    MailboxEmailChangeError,
+    MailboxError,
+)
+from app.models import AuthorizedAddress
 from app.models import Mailbox
 from app.pgp_utils import PGPException, load_public_key_and_check
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
@@ -25,7 +25,7 @@ from app.utils import sanitize_email, CSRFValidationForm


 class ChangeEmailForm(FlaskForm):
-    email = EmailField(
+    email = StringField(
         "email", validators=[validators.DataRequired(), validators.Email()]
     )

@@ -56,34 +56,19 @@ def mailbox_detail_route(mailbox_id):
             request.form.get("form-name") == "update-email"
             and change_email_form.validate_on_submit()
         ):
-            new_email = sanitize_email(change_email_form.email.data)
-            if new_email != mailbox.email and not pending_email:
-                # check if this email is not already used
-                if mailbox_already_used(new_email, current_user) or Alias.get_by(
-                    email=new_email
-                ):
-                    flash(f"Email {new_email} already used", "error")
-                elif not email_can_be_used_as_mailbox(new_email):
-                    flash("You cannot use this email address as your mailbox", "error")
-                else:
-                    mailbox.new_email = new_email
-                    Session.commit()
-
-                    try:
-                        verify_mailbox_change(current_user, mailbox, new_email)
-                    except SMTPRecipientsRefused:
-                        flash(
-                            f"Incorrect mailbox, please recheck {mailbox.email}",
-                            "error",
-                        )
-                    else:
-                        flash(
-                            f"You are going to receive an email to confirm {new_email}.",
-                            "success",
-                        )
-                    return redirect(
-                        url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
-                    )
+            try:
+                response = mailbox_utils.request_mailbox_email_change(
+                    current_user, mailbox, change_email_form.email.data
+                )
+                flash(
+                    f"You are going to receive an email to confirm {mailbox.email}.",
+                    "success",
+                )
+            except mailbox_utils.MailboxError as e:
+                flash(e.msg, "error")
+            return redirect(
+                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
+            )
         elif request.form.get("form-name") == "force-spf":
             if not ENFORCE_SPF:
                 flash("SPF enforcement globally not enabled", "error")
@@ -265,81 +250,57 @@ def mailbox_detail_route(mailbox_id):
     return render_template("dashboard/mailbox_detail.html", **locals())


-def verify_mailbox_change(user, mailbox, new_email):
-    s = TimestampSigner(MAILBOX_SECRET)
-    mailbox_id_signed = s.sign(str(mailbox.id)).decode()
-    verification_url = (
-        f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
-    )
-
-    send_email(
-        new_email,
-        "Confirm mailbox change on SimpleLogin",
-        render(
-            "transactional/verify-mailbox-change.txt.jinja2",
-            user=user,
-            link=verification_url,
-            mailbox_email=mailbox.email,
-            mailbox_new_email=new_email,
-        ),
-        render(
-            "transactional/verify-mailbox-change.html",
-            user=user,
-            link=verification_url,
-            mailbox_email=mailbox.email,
-            mailbox_new_email=new_email,
-        ),
-    )
-
-
 @dashboard_bp.route(
     "/mailbox/<int:mailbox_id>/cancel_email_change", methods=["GET", "POST"]
 )
 @login_required
 def cancel_mailbox_change_route(mailbox_id):
-    mailbox = Mailbox.get(mailbox_id)
-    if not mailbox or mailbox.user_id != current_user.id:
-        flash("You cannot see this page", "warning")
-        return redirect(url_for("dashboard.index"))
-
-    if mailbox.new_email:
-        mailbox.new_email = None
-        Session.commit()
+    try:
+        mailbox_utils.cancel_email_change(mailbox_id, current_user)
         flash("Your mailbox change is cancelled", "success")
         return redirect(
             url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
         )
-    else:
-        flash("You have no pending mailbox change", "warning")
-        return redirect(
-            url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
-        )
+    except MailboxError as e:
+        flash(e.msg, "warning")
+        return redirect(url_for("dashboard.index"))


 @dashboard_bp.route("/mailbox/confirm_change")
+@login_required
+@limiter.limit("3/minute")
 def mailbox_confirm_email_change_route():
-    s = TimestampSigner(MAILBOX_SECRET)
-    signed_mailbox_id = request.args.get("mailbox_id")
-
-    try:
-        mailbox_id = int(s.unsign(signed_mailbox_id, max_age=900))
-    except Exception:
-        flash("Invalid link", "error")
-        return redirect(url_for("dashboard.index"))
-
-    res = perform_mailbox_email_change(mailbox_id)
-
-    flash(res.message, res.message_category)
-    if res.error:
-        if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
-            return redirect(
-                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
-            )
-        elif res.error == MailboxEmailChangeError.InvalidId:
-            return redirect(url_for("dashboard.index"))
-        else:
-            raise Exception("Unhandled MailboxEmailChangeError")
-    else:
-        return redirect(
-            url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
-        )
+    mailbox_id = request.args.get("mailbox_id")
+
+    code = request.args.get("code")
+    if code:
+        try:
+            mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
+            flash("Successfully changed mailbox email", "success")
+            return redirect(
+                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
+            )
+        except mailbox_utils.MailboxError as e:
+            flash(f"Cannot verify mailbox: {e.msg}", "error")
+            return redirect(url_for("dashboard.mailbox_route"))
+    else:
+        s = TimestampSigner(MAILBOX_SECRET)
+        try:
+            mailbox_id = int(s.unsign(mailbox_id, max_age=900))
+            res = perform_mailbox_email_change(mailbox_id)
+            flash(res.message, res.message_category)
+            if res.error:
+                if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
+                    return redirect(
+                        url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
+                    )
+                elif res.error == MailboxEmailChangeError.InvalidId:
+                    return redirect(url_for("dashboard.index"))
+                else:
+                    raise Exception("Unhandled MailboxEmailChangeError")
+        except Exception:
+            flash("Invalid link", "error")
+            return redirect(url_for("dashboard.index"))

+        flash("Successfully changed mailbox email", "success")
+        return redirect(url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id))
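Editor's note: the mailbox views now go through app.mailbox_utils instead of manipulating the Mailbox row and sending mail inline. The surface used above, inferred from the call sites (exact signatures are assumptions): request_mailbox_email_change(user, mailbox, new_email), cancel_email_change(mailbox_id, user), verify_mailbox_code(user, mailbox_id, code) returning the updated Mailbox, and MailboxError carrying a user-facing .msg. A minimal error-handling sketch:

try:
    mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
except mailbox_utils.MailboxError as e:
    flash(f"Cannot verify mailbox: {e.msg}", "error")
else:
    flash("Successfully changed mailbox email", "success")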
@@ -22,7 +22,7 @@ from app.models import (
     PartnerUser,
     PartnerSubscription,
 )
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner


 @dashboard_bp.route("/pricing", methods=["GET", "POST"])
@@ -41,7 +41,8 @@ from app.models import (
     PartnerSubscription,
     UnsubscribeBehaviourEnum,
 )
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
+from app.proton.proton_unlink import can_unlink_proton_account
 from app.utils import (
     random_string,
     CSRFValidationForm,
@@ -174,7 +175,12 @@ def setting():
             flash("Your preference has been updated", "success")
             return redirect(url_for("dashboard.setting"))
         elif request.form.get("form-name") == "random-alias-suffix":
-            scheme = int(request.form.get("random-alias-suffix-generator"))
+            try:
+                scheme = int(request.form.get("random-alias-suffix-generator"))
+            except ValueError:
+                flash("Invalid value", "error")
+                return redirect(url_for("dashboard.setting"))
+
             if AliasSuffixEnum.has_value(scheme):
                 current_user.random_alias_suffix = scheme
                 Session.commit()
@@ -318,4 +324,5 @@ def setting():
         ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
         connect_with_proton=CONNECT_WITH_PROTON,
         proton_linked_account=proton_linked_account,
+        can_unlink_proton_account=can_unlink_proton_account(current_user),
     )
@@ -1,4 +1,5 @@
 """List of clients"""
+
 from flask import render_template
 from flask_login import current_user, login_required

@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from dataclasses import dataclass
 from typing import List, Optional

 import dns.resolver
@@ -9,42 +8,13 @@ from app.config import NAMESERVERS
 _include_spf = "include:"


-@dataclass
-class MxRecord:
-    priority: int
-    domain: str
-
-
-def is_mx_equivalent(
-    mx_domains: List[MxRecord], ref_mx_domains: List[MxRecord]
-) -> bool:
-    """
-    Compare mx_domains with ref_mx_domains to see if they are equivalent.
-    mx_domains and ref_mx_domains are list of (priority, domain)
-
-    The priority order is taken into account but not the priority number.
-    For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
-    """
-    mx_domains = sorted(mx_domains, key=lambda x: x.priority)
-    ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x.priority)
-
-    if len(mx_domains) < len(ref_mx_domains):
-        return False
-
-    for actual, expected in zip(mx_domains, ref_mx_domains):
-        if actual.domain != expected.domain:
-            return False
-
-    return True
-
-
 class DNSClient(ABC):
     @abstractmethod
     def get_cname_record(self, hostname: str) -> Optional[str]:
         pass

     @abstractmethod
-    def get_mx_domains(self, hostname: str) -> List[MxRecord]:
+    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
         pass

     def get_spf_domain(self, hostname: str) -> List[str]:
@@ -88,21 +58,24 @@ class NetworkDNSClient(DNSClient):
         except Exception:
             return None

-    def get_mx_domains(self, hostname: str) -> List[MxRecord]:
+    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
         """
         return list of (priority, domain name) sorted by priority (lowest priority first)
         domain name ends with a "." at the end.
         """
+        ret = {}
         try:
             answers = self._resolver.resolve(hostname, "MX", search=True)
-            ret = []
             for a in answers:
                 record = a.to_text()  # for ex '20 alt2.aspmx.l.google.com.'
                 parts = record.split(" ")
-                ret.append(MxRecord(priority=int(parts[0]), domain=parts[1]))
-            return sorted(ret, key=lambda x: x.priority)
+                prio = int(parts[0])
+                if prio not in ret:
+                    ret[prio] = []
+                ret[prio].append(parts[1])
         except Exception:
-            return []
+            pass
+        return ret

     def get_txt_record(self, hostname: str) -> List[str]:
         try:
@@ -119,14 +92,14 @@ class NetworkDNSClient(DNSClient):
 class InMemoryDNSClient(DNSClient):
     def __init__(self):
         self.cname_records: dict[str, Optional[str]] = {}
-        self.mx_records: dict[str, List[MxRecord]] = {}
+        self.mx_records: dict[int, dict[int, list[str]]] = {}
         self.spf_records: dict[str, List[str]] = {}
         self.txt_records: dict[str, List[str]] = {}

     def set_cname_record(self, hostname: str, cname: str):
         self.cname_records[hostname] = cname

-    def set_mx_records(self, hostname: str, mx_list: List[MxRecord]):
+    def set_mx_records(self, hostname: str, mx_list: dict[int, list[str]]):
         self.mx_records[hostname] = mx_list

     def set_txt_record(self, hostname: str, txt_list: List[str]):
@@ -135,17 +108,27 @@ class InMemoryDNSClient(DNSClient):
     def get_cname_record(self, hostname: str) -> Optional[str]:
         return self.cname_records.get(hostname)

-    def get_mx_domains(self, hostname: str) -> List[MxRecord]:
-        mx_list = self.mx_records.get(hostname, [])
-        return sorted(mx_list, key=lambda x: x.priority)
+    def get_mx_domains(self, hostname: str) -> dict[int, list[str]]:
+        return self.mx_records.get(hostname, {})

     def get_txt_record(self, hostname: str) -> List[str]:
         return self.txt_records.get(hostname, [])


-def get_network_dns_client() -> NetworkDNSClient:
+global_dns_client: Optional[DNSClient] = None
+
+
+def get_network_dns_client() -> DNSClient:
+    global global_dns_client
+    if global_dns_client is not None:
+        return global_dns_client
     return NetworkDNSClient(NAMESERVERS)


-def get_mx_domains(hostname: str) -> List[MxRecord]:
+def set_global_dns_client(dns_client: Optional[DNSClient]):
+    global global_dns_client
+    global_dns_client = dns_client
+
+
+def get_mx_domains(hostname: str) -> dict[int, list[str]]:
     return get_network_dns_client().get_mx_domains(hostname)
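Editor's note: set_global_dns_client is a new seam that lets tests swap in InMemoryDNSClient for the module-level helpers. A sketch of how a test might use it; the module path app.dns_utils is an assumption, not shown in this excerpt:

from app.dns_utils import (  # module path assumed
    InMemoryDNSClient,
    get_mx_domains,
    set_global_dns_client,
)

fake_dns = InMemoryDNSClient()
fake_dns.set_mx_records(
    "example.com", {10: ["mx1.example.org."], 20: ["mx2.example.org."]}
)

set_global_dns_client(fake_dns)
try:
    assert get_mx_domains("example.com") == {
        10: ["mx1.example.org."],
        20: ["mx2.example.org."],
    }
finally:
    set_global_dns_client(None)  # fall back to the real NetworkDNSClient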
@@ -1,4 +1,5 @@
 """Email headers"""
+
 MESSAGE_ID = "Message-ID"
 IN_REPLY_TO = "In-Reply-To"
 REFERENCES = "References"
@@ -657,7 +657,11 @@ def get_mx_domain_list(domain) -> [str]:
     """
     priority_domains = get_mx_domains(domain)

-    return [d.domain[:-1] for d in priority_domains]
+    mx_domains = []
+    for prio in priority_domains:
+        for domain in priority_domains[prio]:
+            mx_domains.append(domain[:-1])
+    return mx_domains


 def personal_email_already_used(email_address: str) -> bool:
@@ -1345,17 +1349,18 @@ def get_queue_id(msg: Message) -> Optional[str]:

     received_header = str(msg[headers.RECEIVED])
     if not received_header:
-        return
+        return None

     # received_header looks like 'from mail-wr1-x434.google.com (mail-wr1-x434.google.com [IPv6:2a00:1450:4864:20::434])\r\n\t(using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits))\r\n\t(No client certificate requested)\r\n\tby mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n\tfor <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)'
-    search_result = re.search("with ESMTPS id [0-9a-zA-Z]{1,}", received_header)
-    if not search_result:
-        return
-
-    # the "with ESMTPS id 4FxQmw1DXdz2vK2" part
-    with_esmtps = received_header[search_result.start() : search_result.end()]
-
-    return with_esmtps[len("with ESMTPS id ") :]
+    search_result = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
+    if search_result:
+        return search_result.group(1)
+    search_result = re.search(
+        r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", received_header
+    )
+    if search_result:
+        return search_result.group(1)
+    return None


 def should_ignore_bounce(mail_from: str) -> bool:
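Editor's note: the broadened regexes now capture the queue id from both the classic "with ESMTPS id ..." form and the Postfix-only "(Postfix)\r\n\tid ...;" form. A self-contained check with illustrative (made-up) headers:

import re

received = (
    "from in.example.com (in.example.com [203.0.113.7])\r\n"
    "\tby mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n"
    "\tfor <alias@example.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)"
)
m = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received)
assert m and m.group(1) == "4FxQmw1DXdz2vK2"

# The Postfix-only variant is covered by the second pattern.
local = "by mx1.simplelogin.co (Postfix)\r\n\tid 4FxQmw1DXdz2vK2;"
m2 = re.search(r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", local)
assert m2 and m2.group(1) == "4FxQmw1DXdz2vK2"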
@@ -8,7 +8,7 @@ from app.errors import ProtonPartnerNotSetUp
 from app.events.generated import event_pb2
 from app.log import LOG
 from app.models import User, PartnerUser, SyncEvent
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from typing import Optional

 NOTIFICATION_CHANNEL = "simplelogin_sync_events"
|
@ -24,7 +24,7 @@ _sym_db = _symbol_database.Default()
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
|
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\":\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\x12\x10\n\x08lifetime\x18\x02 \x01(\x08\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x0e\n\x0cUserUnlinked\"\xce\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x12\x39\n\ruser_unlinked\x18\x07 \x01(\x0b\x32 .simplelogin_events.UserUnlinkedH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
|
||||||
|
|
||||||
_globals = globals()
|
_globals = globals()
|
||||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
||||||
@ -32,19 +32,21 @@ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
|
|||||||
if not _descriptor._USE_C_DESCRIPTORS:
|
if not _descriptor._USE_C_DESCRIPTORS:
|
||||||
DESCRIPTOR._loaded_options = None
|
DESCRIPTOR._loaded_options = None
|
||||||
_globals['_USERPLANCHANGED']._serialized_start=35
|
_globals['_USERPLANCHANGED']._serialized_start=35
|
||||||
_globals['_USERPLANCHANGED']._serialized_end=75
|
_globals['_USERPLANCHANGED']._serialized_end=93
|
||||||
_globals['_USERDELETED']._serialized_start=77
|
_globals['_USERDELETED']._serialized_start=95
|
||||||
_globals['_USERDELETED']._serialized_end=90
|
_globals['_USERDELETED']._serialized_end=108
|
||||||
_globals['_ALIASCREATED']._serialized_start=92
|
_globals['_ALIASCREATED']._serialized_start=110
|
||||||
_globals['_ALIASCREATED']._serialized_end=184
|
_globals['_ALIASCREATED']._serialized_end=202
|
||||||
_globals['_ALIASSTATUSCHANGED']._serialized_start=186
|
_globals['_ALIASSTATUSCHANGED']._serialized_start=204
|
||||||
_globals['_ALIASSTATUSCHANGED']._serialized_end=270
|
_globals['_ALIASSTATUSCHANGED']._serialized_end=288
|
||||||
_globals['_ALIASDELETED']._serialized_start=272
|
_globals['_ALIASDELETED']._serialized_start=290
|
||||||
_globals['_ALIASDELETED']._serialized_end=313
|
_globals['_ALIASDELETED']._serialized_end=331
|
||||||
_globals['_ALIASCREATEDLIST']._serialized_start=315
|
_globals['_ALIASCREATEDLIST']._serialized_start=333
|
||||||
_globals['_ALIASCREATEDLIST']._serialized_end=383
|
_globals['_ALIASCREATEDLIST']._serialized_end=401
|
||||||
_globals['_EVENTCONTENT']._serialized_start=386
|
_globals['_USERUNLINKED']._serialized_start=403
|
||||||
_globals['_EVENTCONTENT']._serialized_end=789
|
_globals['_USERUNLINKED']._serialized_end=417
|
||||||
_globals['_EVENT']._serialized_start=791
|
_globals['_EVENTCONTENT']._serialized_start=420
|
||||||
_globals['_EVENT']._serialized_end=912
|
_globals['_EVENTCONTENT']._serialized_end=882
|
||||||
|
_globals['_EVENT']._serialized_start=884
|
||||||
|
_globals['_EVENT']._serialized_end=1005
|
||||||
# @@protoc_insertion_point(module_scope)
|
# @@protoc_insertion_point(module_scope)
|
||||||
|
@@ -6,10 +6,12 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
 DESCRIPTOR: _descriptor.FileDescriptor

 class UserPlanChanged(_message.Message):
-    __slots__ = ("plan_end_time",)
+    __slots__ = ("plan_end_time", "lifetime")
     PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
+    LIFETIME_FIELD_NUMBER: _ClassVar[int]
     plan_end_time: int
-    def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
+    lifetime: bool
+    def __init__(self, plan_end_time: _Optional[int] = ..., lifetime: bool = ...) -> None: ...

 class UserDeleted(_message.Message):
     __slots__ = ()
@@ -55,21 +57,27 @@ class AliasCreatedList(_message.Message):
     events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
     def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...

+class UserUnlinked(_message.Message):
+    __slots__ = ()
+    def __init__(self) -> None: ...
+
 class EventContent(_message.Message):
-    __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
+    __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list", "user_unlinked")
     USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
     USER_DELETED_FIELD_NUMBER: _ClassVar[int]
     ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
     ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
     ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
     ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
+    USER_UNLINKED_FIELD_NUMBER: _ClassVar[int]
     user_plan_change: UserPlanChanged
     user_deleted: UserDeleted
     alias_created: AliasCreated
     alias_status_change: AliasStatusChanged
     alias_deleted: AliasDeleted
     alias_create_list: AliasCreatedList
-    def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...
+    user_unlinked: UserUnlinked
+    def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ..., user_unlinked: _Optional[_Union[UserUnlinked, _Mapping]] = ...) -> None: ...

 class Event(_message.Message):
     __slots__ = ("user_id", "external_user_id", "partner_id", "content")
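Editor's note: the regenerated protobuf modules add a lifetime flag to UserPlanChanged and a new UserUnlinked variant in the EventContent oneof. A quick construction example (module path taken from the imports elsewhere in this diff):

from app.events.generated.event_pb2 import EventContent, UserPlanChanged, UserUnlinked

plan_change = EventContent(
    user_plan_change=UserPlanChanged(plan_end_time=0, lifetime=True)
)
unlinked = EventContent(user_unlinked=UserUnlinked())
assert unlinked.WhichOneof("content") == "user_unlinked"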
@@ -33,8 +33,11 @@ from app.models import (
     SLDomain,
     Hibp,
     AliasHibp,
+    PartnerUser,
+    PartnerSubscription,
 )
 from app.pgp_utils import load_public_key
+from app.proton.proton_partner import get_proton_partner


 def fake_data():
@@ -87,7 +90,7 @@ def fake_data():
         user_id=user.id,
         alias_id=alias.id,
         website_email="hey@google.com",
-        reply_email="rep@sl.local",
+        reply_email="rep@sl.lan",
         commit=True,
     )
     EmailLog.create(
@@ -163,7 +166,7 @@ def fake_data():
     #         user_id=user.id,
     #         alias_id=a.id,
     #         website_email=f"contact{i}@example.com",
-    #         reply_email=f"rep{i}@sl.local",
+    #         reply_email=f"rep{i}@sl.lan",
     #     )
     #     Session.commit()
     #     for _ in range(3):
@@ -269,3 +272,27 @@ def fake_data():
     CustomDomain.create(
         user_id=user.id, domain="old.com", verified=True, ownership_verified=True
     )
+
+    # Create a user
+    proton_partner = get_proton_partner()
+    user = User.create(
+        email="test@proton.me",
+        name="Proton test",
+        password="password",
+        activated=True,
+        is_admin=False,
+        intro_shown=True,
+        from_partner=True,
+        flush=True,
+    )
+    pu = PartnerUser.create(
+        user_id=user.id,
+        partner_id=proton_partner.id,
+        partner_email="test@proton.me",
+        external_user_id="DUMMY",
+        flush=True,
+    )
+    PartnerSubscription.create(
+        partner_user_id=pu.id, end_at=arrow.now().shift(years=1, days=1)
+    )
+    Session.commit()
@@ -2,8 +2,8 @@ import urllib
 from email.header import Header
 from email.message import Message

-from app.email import headers
 from app import config
+from app.email import headers
 from app.email_utils import add_or_replace_header, delete_header
 from app.handler.unsubscribe_encoder import (
     UnsubscribeEncoder,
@@ -46,7 +46,11 @@ class UnsubscribeGenerator:
                 if start == -1 or end == -1 or start >= end:
                     continue
                 method = raw_method[start + 1 : end]
-                url_data = urllib.parse.urlparse(method)
+                try:
+                    url_data = urllib.parse.urlparse(method)
+                except ValueError:
+                    LOG.debug(f"Unsub has invalid method {method}. Ignoring.")
+                    continue
                 if url_data.scheme == "mailto":
                     if url_data.path == config.UNSUBSCRIBER:
                         LOG.debug(
|
|||||||
import sqlalchemy
|
import sqlalchemy
|
||||||
|
|
||||||
from app import config
|
from app import config
|
||||||
|
from app.constants import JobType
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.email import headers
|
from app.email import headers
|
||||||
from app.email_utils import (
|
from app.email_utils import (
|
||||||
@ -174,7 +175,7 @@ class ExportUserDataJob:
|
|||||||
jobs_in_db = (
|
jobs_in_db = (
|
||||||
Session.query(Job)
|
Session.query(Job)
|
||||||
.filter(
|
.filter(
|
||||||
Job.name == config.JOB_SEND_USER_REPORT,
|
Job.name == JobType.SEND_USER_REPORT.value,
|
||||||
Job.payload.op("->")("user_id").cast(sqlalchemy.TEXT)
|
Job.payload.op("->")("user_id").cast(sqlalchemy.TEXT)
|
||||||
== str(self._user.id),
|
== str(self._user.id),
|
||||||
Job.taken.is_(False),
|
Job.taken.is_(False),
|
||||||
@ -184,7 +185,7 @@ class ExportUserDataJob:
|
|||||||
if jobs_in_db > 0:
|
if jobs_in_db > 0:
|
||||||
return None
|
return None
|
||||||
return Job.create(
|
return Job.create(
|
||||||
name=config.JOB_SEND_USER_REPORT,
|
name=JobType.SEND_USER_REPORT.value,
|
||||||
payload={"user_id": self._user.id},
|
payload={"user_id": self._user.id},
|
||||||
run_at=arrow.now(),
|
run_at=arrow.now(),
|
||||||
commit=True,
|
commit=True,
|
||||||
|
@@ -5,7 +5,7 @@ from typing import Optional

 import arrow

-from app import config
+from app.constants import JobType
 from app.errors import ProtonPartnerNotSetUp
 from app.events.generated import event_pb2
 from app.events.generated.event_pb2 import EventContent
@@ -14,7 +14,7 @@ from app.models import (
     Job,
     PartnerUser,
 )
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from events.event_sink import EventSink


@@ -57,14 +57,16 @@ class SendEventToWebhookJob:

         return SendEventToWebhookJob(user=user, event=event)

-    def store_job_in_db(self, run_at: Optional[arrow.Arrow]) -> Job:
+    def store_job_in_db(
+        self, run_at: Optional[arrow.Arrow], commit: bool = True
+    ) -> Job:
         stub = self._event.SerializeToString()
         return Job.create(
-            name=config.JOB_SEND_EVENT_TO_WEBHOOK,
+            name=JobType.SEND_EVENT_TO_WEBHOOK.value,
             payload={
                 "user_id": self._user.id,
                 "event": base64.b64encode(stub).decode("utf-8"),
             },
             run_at=run_at if run_at is not None else arrow.now(),
-            commit=True,
+            commit=commit,
         )
@@ -10,7 +10,7 @@ from app.config import (
 
 # this format allows clickable link to code source in PyCharm
 _log_format = (
-    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - "
+    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - %(request_id)s"
     '"%(pathname)s:%(lineno)d" - %(funcName)s() - %(message_id)s - %(message)s'
 )
 _log_formatter = logging.Formatter(_log_format)
@@ -37,6 +37,21 @@ class EmailHandlerFilter(logging.Filter):
         return _MESSAGE_ID
 
 
+class RequestIdFilter(logging.Filter):
+    """automatically add request-id to keep track of a request"""
+
+    def filter(self, record):
+        from flask import g, has_request_context
+
+        request_id = ""
+        if has_request_context() and hasattr(g, "request_id"):
+            ctx_request_id = getattr(g, "request_id")
+            if ctx_request_id:
+                request_id = f"{ctx_request_id} - "
+        record.request_id = request_id
+        return True
+
+
 def _get_console_handler():
     console_handler = logging.StreamHandler(sys.stdout)
     console_handler.setFormatter(_log_formatter)
@@ -54,6 +69,7 @@ def _get_logger(name) -> logging.Logger:
     logger.addHandler(_get_console_handler())
 
     logger.addFilter(EmailHandlerFilter())
+    logger.addFilter(RequestIdFilter())
 
     # no propagation to avoid propagating to root logger
     logger.propagate = False
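Note that RequestIdFilter only annotates log records when `g.request_id` has already been set for the current request. The compare also adds a `generate_request_id()` helper in `app/request_utils.py`; how the Flask app wires the two together is not shown here, so the following is only a minimal sketch of one plausible hookup (the `register_request_id` name and the use of `before_request` are assumptions):

from flask import Flask, g

from app.request_utils import generate_request_id


def register_request_id(app: Flask) -> None:
    # Hypothetical wiring: attach a short random id to every incoming request
    # so RequestIdFilter can include it in each log line.
    @app.before_request
    def _set_request_id():
        g.request_id = generate_request_id()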
@@ -2,21 +2,25 @@ import dataclasses
 import secrets
 from enum import Enum
 from typing import Optional
 
 import arrow
+from sqlalchemy.exc import IntegrityError
 
 from app import config
-from app.config import JOB_DELETE_MAILBOX
+from app.constants import JobType
 from app.db import Session
 from app.email_utils import (
     mailbox_already_used,
     email_can_be_used_as_mailbox,
     send_email,
     render,
+    get_email_domain_part,
 )
 from app.email_validation import is_valid_email
 from app.log import LOG
-from app.models import User, Mailbox, Job, MailboxActivation
+from app.models import User, Mailbox, Job, MailboxActivation, Alias
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
+from app.utils import canonicalize_email, sanitize_email
 
 
 @dataclasses.dataclass
@@ -52,26 +56,13 @@ def create_mailbox(
     use_digit_codes: bool = False,
     send_link: bool = True,
 ) -> CreateMailboxOutput:
+    email = sanitize_email(email)
     if not user.is_premium():
         LOG.i(
             f"User {user} has tried to create mailbox with {email} but is not premium"
         )
         raise OnlyPaidError()
-    if not is_valid_email(email):
-        LOG.i(
-            f"User {user} has tried to create mailbox with {email} but is not valid email"
-        )
-        raise MailboxError("Invalid email")
-    elif mailbox_already_used(email, user):
-        LOG.i(
-            f"User {user} has tried to create mailbox with {email} but email is already used"
-        )
-        raise MailboxError("Email already used")
-    elif not email_can_be_used_as_mailbox(email):
-        LOG.i(
-            f"User {user} has tried to create mailbox with {email} but email is invalid"
-        )
-        raise MailboxError("Invalid email")
+    check_email_for_mailbox(email, user)
     new_mailbox: Mailbox = Mailbox.create(
         email=email, user_id=user.id, verified=verified, commit=True
     )
@@ -103,8 +94,29 @@ def create_mailbox(
     return output
 
 
+def check_email_for_mailbox(email, user):
+    if not is_valid_email(email):
+        LOG.i(
+            f"User {user} has tried to create mailbox with {email} but is not valid email"
+        )
+        raise MailboxError("Invalid email")
+    elif mailbox_already_used(email, user):
+        LOG.i(
+            f"User {user} has tried to create mailbox with {email} but email is already used"
+        )
+        raise MailboxError("Email already used")
+    elif not email_can_be_used_as_mailbox(email):
+        LOG.i(
+            f"User {user} has tried to create mailbox with {email} but email is invalid"
+        )
+        raise MailboxError("Invalid email")
+
+
 def delete_mailbox(
-    user: User, mailbox_id: int, transfer_mailbox_id: Optional[int]
+    user: User,
+    mailbox_id: int,
+    transfer_mailbox_id: Optional[int],
+    send_mail: bool = True,
 ) -> Mailbox:
     mailbox = Mailbox.get(mailbox_id)
 
@@ -144,12 +156,13 @@ def delete_mailbox(
         f"User {user} has scheduled delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
     )
     Job.create(
-        name=JOB_DELETE_MAILBOX,
+        name=JobType.DELETE_MAILBOX.value,
         payload={
             "mailbox_id": mailbox.id,
             "transfer_mailbox_id": transfer_mailbox_id
             if transfer_mailbox_id and transfer_mailbox_id > 0
             else None,
+            "send_mail": send_mail,
         },
         run_at=arrow.now(),
         commit=True,
@@ -176,7 +189,7 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
             f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
         )
         raise MailboxError("Invalid mailbox")
-    if mailbox.verified:
+    if mailbox.verified and not mailbox.new_email:
         LOG.i(
             f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
         )
@@ -213,13 +226,34 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
         activation.tries = activation.tries + 1
         Session.commit()
         raise CannotVerifyError("Invalid activation code")
-    LOG.i(f"User {user} has verified mailbox {mailbox_id}")
-    mailbox.verified = True
-    emit_user_audit_log(
-        user=user,
-        action=UserAuditLogAction.VerifyMailbox,
-        message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
-    )
+    if mailbox.new_email:
+        LOG.i(
+            f"User {user} has verified mailbox email change from {mailbox.email} to {mailbox.new_email}"
+        )
+        emit_user_audit_log(
+            user=user,
+            action=UserAuditLogAction.UpdateMailbox,
+            message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
+        )
+        mailbox.email = mailbox.new_email
+        mailbox.new_email = None
+        mailbox.verified = True
+    elif not mailbox.verified:
+        LOG.i(f"User {user} has verified mailbox {mailbox_id}")
+        mailbox.verified = True
+        emit_user_audit_log(
+            user=user,
+            action=UserAuditLogAction.VerifyMailbox,
+            message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
+        )
+        if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
+            raise MailboxError("That address is already in use")
+
+    else:
+        LOG.i(
+            "User {user} alread has mailbox {mailbox} verified and no pending email change"
+        )
+
     clear_activation_codes_for_mailbox(mailbox)
     return mailbox
@@ -244,7 +278,10 @@ def generate_activation_code(
 
 
 def send_verification_email(
-    user: User, mailbox: Mailbox, activation: MailboxActivation, send_link: bool = True
+    user: User,
+    mailbox: Mailbox,
+    activation: MailboxActivation,
+    send_link: bool = True,
 ):
     LOG.i(
         f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
@@ -279,6 +316,79 @@ def send_verification_email(
     )
 
 
+def send_change_email(user: User, mailbox: Mailbox, activation: MailboxActivation):
+    verification_url = f"{config.URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox.id}&code={activation.code}"
+
+    send_email(
+        mailbox.new_email,
+        "Confirm mailbox change on SimpleLogin",
+        render(
+            "transactional/verify-mailbox-change.txt.jinja2",
+            user=user,
+            link=verification_url,
+            mailbox_email=mailbox.email,
+            mailbox_new_email=mailbox.new_email,
+        ),
+        render(
+            "transactional/verify-mailbox-change.html",
+            user=user,
+            link=verification_url,
+            mailbox_email=mailbox.email,
+            mailbox_new_email=mailbox.new_email,
+        ),
+    )
+
+
+def request_mailbox_email_change(
+    user: User,
+    mailbox: Mailbox,
+    new_email: str,
+    email_ownership_verified: bool = False,
+    send_email: bool = True,
+    use_digit_codes: bool = False,
+) -> CreateMailboxOutput:
+    new_email = sanitize_email(new_email)
+    if new_email == mailbox.email:
+        raise MailboxError("Same email")
+    check_email_for_mailbox(new_email, user)
+    if email_ownership_verified:
+        mailbox.email = new_email
+        mailbox.new_email = None
+        mailbox.verified = True
+    else:
+        mailbox.new_email = new_email
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.UpdateMailbox,
+        message=f"Updated mailbox {mailbox.id} email ({new_email}) pre-verified({email_ownership_verified}",
+    )
+    try:
+        Session.commit()
+    except IntegrityError:
+        LOG.i(f"This email {new_email} is already pending for some mailbox")
+        Session.rollback()
+        raise MailboxError("Email already in use")
+
+    if email_ownership_verified:
+        LOG.i(f"User {user} as created a pre-verified mailbox with {new_email}")
+        return CreateMailboxOutput(mailbox=mailbox, activation=None)
+
+    LOG.i(f"User {user} has updated mailbox email with {new_email}")
+    activation = generate_activation_code(mailbox, use_digit_code=use_digit_codes)
+    output = CreateMailboxOutput(mailbox=mailbox, activation=activation)
+
+    if not send_email:
+        LOG.i(f"Skipping sending validation email for mailbox {mailbox}")
+        return output
+
+    send_change_email(
+        user,
+        mailbox,
+        activation=activation,
+    )
+    return output
+
+
 class MailboxEmailChangeError(Enum):
     InvalidId = 1
     EmailAlreadyUsed = 2
@@ -328,3 +438,73 @@ def perform_mailbox_email_change(mailbox_id: int) -> MailboxEmailChangeResult:
             message="Invalid link",
             message_category="error",
         )
+
+
+def cancel_email_change(mailbox_id: int, user: User):
+    mailbox = Mailbox.get(mailbox_id)
+    if not mailbox:
+        LOG.i(
+            f"User {user} has tried to cancel a mailbox an unknown mailbox {mailbox_id}"
+        )
+        raise MailboxError("Invalid mailbox")
+    if mailbox.user.id != user.id:
+        LOG.i(
+            f"User {user} has tried to cancel a mailbox {mailbox} owned by another user"
+        )
+        raise MailboxError("Invalid mailbox")
+    mailbox.new_email = None
+    LOG.i(f"User {mailbox.user} has cancelled mailbox email change")
+    clear_activation_codes_for_mailbox(mailbox)
+
+
+def __get_alias_mailbox_from_email(
+    email_address: str, alias: Alias
+) -> Optional[Mailbox]:
+    for mailbox in alias.mailboxes:
+        if mailbox.email == email_address:
+            return mailbox
+
+        for authorized_address in mailbox.authorized_addresses:
+            if authorized_address.email == email_address:
+                LOG.d(
+                    "Found an authorized address for %s %s %s",
+                    alias,
+                    mailbox,
+                    authorized_address,
+                )
+                return mailbox
+    return None
+
+
+def __get_alias_mailbox_from_email_or_canonical_email(
+    email_address: str, alias: Alias
+) -> Optional[Mailbox]:
+    # We need to first check for the uncanonicalized version because we still have users in the db with the
+    # email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
+    mbox = __get_alias_mailbox_from_email(email_address, alias)
+    if mbox is not None:
+        return mbox
+    canonical_email = canonicalize_email(email_address)
+    if canonical_email != email_address:
+        return __get_alias_mailbox_from_email(canonical_email, alias)
+    return None
+
+
+def get_mailbox_for_reply_phase(
+    envelope_mail_from: str, header_mail_from: str, alias
+) -> Optional[Mailbox]:
+    """return the corresponding mailbox given the mail_from and alias
+    Usually the mail_from=mailbox.email but it can also be one of the authorized address
+    """
+    mbox = __get_alias_mailbox_from_email_or_canonical_email(envelope_mail_from, alias)
+    if mbox is not None:
+        return mbox
+    if not header_mail_from:
+        return None
+    envelope_from_domain = get_email_domain_part(envelope_mail_from)
+    header_from_domain = get_email_domain_part(header_mail_from)
+    if envelope_from_domain != header_from_domain:
+        return None
+    # For services that use VERP sending (envelope from has encoded data to account for bounces)
+    # if the domain is the same in the header from as the envelope from we can use the header from
+    return __get_alias_mailbox_from_email_or_canonical_email(header_mail_from, alias)
@@ -30,9 +30,9 @@ from sqlalchemy_utils import ArrowType
 
 from app import config, rate_limiter
 from app import s3
+from app.constants import JobType
 from app.db import Session
 from app.dns_utils import get_mx_domains
-
 from app.errors import (
     AliasInTrashError,
     DirectoryInTrashError,
@@ -239,6 +239,7 @@ class AuditLogActionEnum(EnumE):
     disable_user = 9
     enable_user = 10
     stop_trial = 11
+    unlink_user = 12
 
 
 class Phase(EnumE):
@@ -275,6 +276,12 @@ class AliasDeleteReason(EnumE):
     CustomDomainDeleted = 5
 
 
+class JobPriority(EnumE):
+    Low = 1
+    Default = 50
+    High = 100
+
+
 class IntEnumType(sa.types.TypeDecorator):
     impl = sa.Integer
@@ -343,7 +350,7 @@ class Fido(Base, ModelMixin):
 class User(Base, ModelMixin, UserMixin, PasswordOracle):
     __tablename__ = "users"
 
-    FLAG_DISABLE_CREATE_CONTACTS = 1 << 0
+    FLAG_FREE_DISABLE_CREATE_CONTACTS = 1 << 0
     FLAG_CREATED_FROM_PARTNER = 1 << 1
     FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
     FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
@@ -362,7 +369,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         sa.Boolean, default=True, nullable=False, server_default="1"
     )
 
-    activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
+    activated = sa.Column(sa.Boolean, default=False, nullable=False)
 
     # an account can be disabled if having harmful behavior
     disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@@ -550,7 +557,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     # bitwise flags. Allow for future expansion
     flags = sa.Column(
         sa.BigInteger,
-        default=FLAG_DISABLE_CREATE_CONTACTS,
+        default=FLAG_FREE_DISABLE_CREATE_CONTACTS,
         server_default="0",
         nullable=False,
     )
@@ -576,6 +583,12 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
             "ix_users_default_alias_custom_domain_id", default_alias_custom_domain_id
         ),
         sa.Index("ix_users_profile_picture_id", profile_picture_id),
+        sa.Index(
+            "idx_users_email_trgm",
+            "email",
+            postgresql_ops={"email": "gin_trgm_ops"},
+            postgresql_using="gin",
+        ),
     )
 
     @property
@@ -640,11 +653,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         # If the user is created from partner, do not notify
         # nor give a trial
         if from_partner:
-            user.flags = User.FLAG_CREATED_FROM_PARTNER
+            user.flags = user.flags | User.FLAG_CREATED_FROM_PARTNER
             user.notification = False
             user.trial_end = None
             Job.create(
-                name=config.JOB_SEND_PROTON_WELCOME_1,
+                name=JobType.SEND_PROTON_WELCOME_1.value,
                 payload={"user_id": user.id},
                 run_at=arrow.now(),
             )
@@ -670,17 +683,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 
         # Schedule onboarding emails
         Job.create(
-            name=config.JOB_ONBOARDING_1,
+            name=JobType.ONBOARDING_1.value,
             payload={"user_id": user.id},
             run_at=arrow.now().shift(days=1),
         )
         Job.create(
-            name=config.JOB_ONBOARDING_2,
+            name=JobType.ONBOARDING_2.value,
             payload={"user_id": user.id},
             run_at=arrow.now().shift(days=2),
         )
         Job.create(
-            name=config.JOB_ONBOARDING_4,
+            name=JobType.ONBOARDING_4.value,
             payload={"user_id": user.id},
             run_at=arrow.now().shift(days=3),
         )
@@ -1189,7 +1202,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     def can_create_contacts(self) -> bool:
         if self.is_premium():
             return True
-        if self.flags & User.FLAG_DISABLE_CREATE_CONTACTS == 0:
+        if self.flags & User.FLAG_FREE_DISABLE_CREATE_CONTACTS == 0:
             return True
         return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
 
@@ -1659,7 +1672,7 @@ class Alias(Base, ModelMixin):
         return False
 
     @staticmethod
-    def get_custom_domain(alias_address) -> Optional["CustomDomain"]:
+    def get_custom_domain(alias_address: str) -> Optional["CustomDomain"]:
         alias_domain = validate_email(
             alias_address, check_deliverability=False, allow_smtputf8=False
         ).domain
@@ -1924,13 +1937,16 @@ class Contact(Base, ModelMixin):
 
     __table_args__ = (
         sa.UniqueConstraint("alias_id", "website_email", name="uq_contact"),
+        sa.Index("ix_contact_user_id_id", "user_id", "id"),
     )
 
     user_id = sa.Column(
-        sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
+        sa.ForeignKey(User.id, ondelete="cascade"),
+        nullable=False,
     )
     alias_id = sa.Column(
-        sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
+        sa.ForeignKey(Alias.id, ondelete="cascade"),
+        nullable=False,
     )
 
     name = sa.Column(
@@ -2115,11 +2131,10 @@ class EmailLog(Base, ModelMixin):
         Index("ix_email_log_mailbox_id", "mailbox_id"),
         Index("ix_email_log_bounced_mailbox_id", "bounced_mailbox_id"),
         Index("ix_email_log_refused_email_id", "refused_email_id"),
+        Index("ix_email_log_user_id_email_log_id", "user_id", "id"),
     )
 
-    user_id = sa.Column(
-        sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
-    )
+    user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
     contact_id = sa.Column(
         sa.ForeignKey(Contact.id, ondelete="cascade"), nullable=False, index=True
     )
@@ -2395,7 +2410,8 @@ class AliasUsedOn(Base, ModelMixin):
     )
 
     alias_id = sa.Column(
-        sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
+        sa.ForeignKey(Alias.id, ondelete="cascade"),
+        nullable=False,
     )
     user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
 
@@ -2418,10 +2434,7 @@ class ApiKey(Base, ModelMixin):
 
     user = orm.relationship(User)
 
-    __table_args__ = (
-        sa.Index("ix_api_key_code", "code"),
-        sa.Index("ix_api_key_user_id", "user_id"),
-    )
+    __table_args__ = (sa.Index("ix_api_key_user_id", "user_id"),)
 
     @classmethod
     def create(cls, user_id, name=None, **kwargs):
@@ -2581,7 +2594,6 @@ class AutoCreateRule(Base, ModelMixin):
         sa.UniqueConstraint(
             "custom_domain_id", "order", name="uq_auto_create_rule_order"
         ),
-        sa.Index("ix_auto_create_rule_custom_domain_id", "custom_domain_id"),
     )
 
     custom_domain_id = sa.Column(
@@ -2764,12 +2776,19 @@ class Job(Base, ModelMixin):
         nullable=False,
         server_default=str(JobState.ready.value),
         default=JobState.ready.value,
-        index=True,
     )
     attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0)
     taken_at = sa.Column(ArrowType, nullable=True)
+    priority = sa.Column(
+        IntEnumType(JobPriority),
+        default=JobPriority.Default,
+        server_default=str(JobPriority.Default.value),
+        nullable=False,
+    )
 
-    __table_args__ = (Index("ix_state_run_at_taken_at", state, run_at, taken_at),)
+    __table_args__ = (
+        Index("ix_state_run_at_taken_at_priority", state, run_at, taken_at, priority),
+    )
 
     def __repr__(self):
         return f"<Job {self.id} {self.name} {self.payload}>"
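The new `priority` column and the widened `(state, run_at, taken_at, priority)` index suggest that the job runner can prefer higher-priority jobs when polling. The runner itself is not part of this compare, so the query below is only a sketch of how such ordering might look against this schema; the `pick_next_jobs` name and the exact filters are assumptions:

import arrow

from app.db import Session
from app.models import Job, JobState


def pick_next_jobs(limit: int = 10) -> list:
    # Hypothetical selection: ready, not-yet-taken jobs whose run_at is due,
    # highest priority first, then oldest run_at within the same priority.
    now = arrow.now()
    return (
        Session.query(Job)
        .filter(Job.state == JobState.ready.value, Job.run_at <= now, Job.taken.is_(False))
        .order_by(Job.priority.desc(), Job.run_at.asc())
        .limit(limit)
        .all()
    )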
@@ -2777,9 +2796,7 @@
 
 class Mailbox(Base, ModelMixin):
     __tablename__ = "mailbox"
-    user_id = sa.Column(
-        sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
-    )
+    user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
     email = sa.Column(sa.String(256), nullable=False, index=True)
     verified = sa.Column(sa.Boolean, default=False, nullable=False)
     force_spf = sa.Column(sa.Boolean, default=True, server_default="1", nullable=False)
@@ -2808,6 +2825,13 @@
     __table_args__ = (
         sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),
         sa.Index("ix_mailbox_pgp_finger_print", "pgp_finger_print"),
+        # index on email column using pg_trgm
+        Index(
+            "ix_mailbox_email_trgm_idx",
+            "email",
+            postgresql_ops={"email": "gin_trgm_ops"},
+            postgresql_using="gin",
+        ),
     )
 
     user = orm.relationship(User, foreign_keys=[user_id])
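Both trigram indexes added here (`idx_users_email_trgm` and `ix_mailbox_email_trgm_idx`) rely on the `gin_trgm_ops` operator class, which is only available once PostgreSQL's `pg_trgm` extension is installed. The corresponding migration is not shown in this compare, so the following Alembic-style sketch is an assumption about how the extension and one of the indexes could be created:

from alembic import op


def upgrade():
    # gin_trgm_ops requires the pg_trgm extension; create it before the trigram index.
    op.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")
    op.create_index(
        "ix_mailbox_email_trgm_idx",
        "mailbox",
        ["email"],
        postgresql_ops={"email": "gin_trgm_ops"},
        postgresql_using="gin",
    )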
@@ -2830,24 +2854,20 @@
         return len(alias_ids)
 
     def is_proton(self) -> bool:
-        if (
-            self.email.endswith("@proton.me")
-            or self.email.endswith("@protonmail.com")
-            or self.email.endswith("@protonmail.ch")
-            or self.email.endswith("@proton.ch")
-            or self.email.endswith("@pm.me")
-        ):
-            return True
+        for proton_email_domain in config.PROTON_EMAIL_DOMAINS:
+            if self.email.endswith(f"@{proton_email_domain}"):
+                return True
 
         from app.email_utils import get_email_local_part
 
         mx_domains = get_mx_domains(get_email_local_part(self.email))
 
+        proton_mx_domains = config.PROTON_MX_SERVERS
         # Proton is the first domain
-        if mx_domains and mx_domains[0].domain in (
-            "mail.protonmail.ch.",
-            "mailsec.protonmail.ch.",
-        ):
-            return True
+        for prio in mx_domains:
+            for mx_domain in mx_domains[prio]:
+                if mx_domain in proton_mx_domains:
+                    return True
 
         return False
 
@@ -3010,7 +3030,11 @@ class SentAlert(Base, ModelMixin):
     to_email = sa.Column(sa.String(256), nullable=False)
     alert_type = sa.Column(sa.String(256), nullable=False)
 
-    __table_args__ = (sa.Index("ix_sent_alert_user_id", "user_id"),)
+    __table_args__ = (
+        sa.Index("ix_sent_alert_user_id", "user_id"),
+        sa.Index("ix_sent_alert_to_email", "to_email"),
+        sa.Index("ix_sent_alert_alert_type", "alert_type"),
+    )
 
 
 class AliasMailbox(Base, ModelMixin):
@@ -3020,7 +3044,8 @@ class AliasMailbox(Base, ModelMixin):
     )
 
     alias_id = sa.Column(
-        sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
+        sa.ForeignKey(Alias.id, ondelete="cascade"),
+        nullable=False,
     )
     mailbox_id = sa.Column(
         sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
@@ -3035,7 +3060,8 @@ class AliasHibp(Base, ModelMixin):
     __table_args__ = (sa.UniqueConstraint("alias_id", "hibp_id", name="uq_alias_hibp"),)
 
     alias_id = sa.Column(
-        sa.Integer(), sa.ForeignKey("alias.id", ondelete="cascade"), index=True
+        sa.Integer(),
+        sa.ForeignKey("alias.id", ondelete="cascade"),
     )
     hibp_id = sa.Column(
         sa.Integer(), sa.ForeignKey("hibp.id", ondelete="cascade"), index=True
@@ -3751,7 +3777,8 @@ class PartnerUser(Base, ModelMixin):
         index=True,
     )
     partner_id = sa.Column(
-        sa.ForeignKey("partner.id", ondelete="cascade"), nullable=False, index=True
+        sa.ForeignKey("partner.id", ondelete="cascade"),
+        nullable=False,
    )
     external_user_id = sa.Column(sa.String(128), unique=False, nullable=False)
     partner_email = sa.Column(sa.String(255), unique=False, nullable=True)
@@ -3778,7 +3805,8 @@ class PartnerSubscription(Base, ModelMixin):
     )
 
     # when the partner subscription ends
-    end_at = sa.Column(ArrowType, nullable=False, index=True)
+    end_at = sa.Column(ArrowType, nullable=True, index=True)
+    lifetime = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
 
     partner_user = orm.relationship(PartnerUser)
 
@@ -3800,7 +3828,9 @@
         return None
 
     def is_active(self):
-        return self.end_at > arrow.now().shift(days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS)
+        return self.lifetime or self.end_at > arrow.now().shift(
+            days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS
+        )
 
 
 # endregion
@@ -1,4 +1,4 @@
-from app.build_info import SHA1
+from app.build_info import SHA1, VERSION
 from app.monitor.base import monitor_bp
 
 
@@ -7,6 +7,11 @@ def git_sha1():
     return SHA1
 
 
+@monitor_bp.route("/version")
+def version():
+    return VERSION
+
+
 @monitor_bp.route("/live")
 def live():
     return "live"

app/app/monitor_utils.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+from app.build_info import VERSION
+import newrelic.agent
+
+
+def send_version_event(service: str):
+    newrelic.agent.record_custom_event(
+        "ServiceVersion", {"service": service, "version": VERSION}
+    )
@@ -3,7 +3,7 @@ from typing import Optional
 import arrow
 from arrow import Arrow
 
-from app import config
+from app.constants import JobType
 from app.models import PartnerUser, PartnerSubscription, User, Job
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 
@@ -18,7 +18,7 @@ def create_partner_user(
         external_user_id=external_user_id,
     )
     Job.create(
-        name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
+        name=JobType.SEND_ALIAS_CREATION_EVENTS.value,
         payload={"user_id": user.id},
         run_at=arrow.now(),
     )
@@ -33,12 +33,14 @@ def create_partner_user(
 
 def create_partner_subscription(
     partner_user: PartnerUser,
-    expiration: Optional[Arrow],
+    expiration: Optional[Arrow] = None,
+    lifetime: bool = False,
     msg: Optional[str] = None,
 ) -> PartnerSubscription:
     instance = PartnerSubscription.create(
         partner_user_id=partner_user.id,
         end_at=expiration,
+        lifetime=lifetime,
     )
 
     message = "User upgraded through partner subscription"
@@ -16,6 +16,7 @@ PROTON_ERROR_CODE_HV_NEEDED = 9001
 
 PLAN_FREE = 1
 PLAN_PREMIUM = 2
+PLAN_PREMIUM_LIFETIME = 3
 
 
 @dataclass
@@ -112,10 +113,13 @@ class HttpProtonClient(ProtonClient):
         if plan_value == PLAN_FREE:
             plan = SLPlan(type=SLPlanType.Free, expiration=None)
         elif plan_value == PLAN_PREMIUM:
+            expiration = info.get("PlanExpiration", "1")
             plan = SLPlan(
                 type=SLPlanType.Premium,
-                expiration=Arrow.fromtimestamp(info["PlanExpiration"], tzinfo="utc"),
+                expiration=Arrow.fromtimestamp(expiration, tzinfo="utc"),
             )
+        elif plan_value == PLAN_PREMIUM_LIFETIME:
+            plan = SLPlan(SLPlanType.PremiumLifetime, expiration=None)
         else:
             raise Exception(f"Invalid value for plan: {plan_value}")
 

app/app/proton/proton_partner.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+from typing import Optional
+
+from app.db import Session
+from app.errors import ProtonPartnerNotSetUp
+from app.models import Partner
+
+PROTON_PARTNER_NAME = "Proton"
+_PROTON_PARTNER: Optional[Partner] = None
+
+
+def get_proton_partner() -> Partner:
+    global _PROTON_PARTNER
+    if _PROTON_PARTNER is None:
+        partner = Partner.get_by(name=PROTON_PARTNER_NAME)
+        if partner is None:
+            raise ProtonPartnerNotSetUp
+        Session.expunge(partner)
+        _PROTON_PARTNER = partner
+    return _PROTON_PARTNER
+
+
+def is_proton_partner(partner: Partner) -> bool:
+    return partner.name == PROTON_PARTNER_NAME
@@ -1,32 +1,23 @@
 from newrelic import agent
-from typing import Optional
 
 from app.db import Session
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import EventContent, UserUnlinked
 from app.log import LOG
-from app.errors import ProtonPartnerNotSetUp
-from app.models import Partner, PartnerUser, User
+from app.models import User, PartnerUser
+from app.proton.proton_partner import get_proton_partner
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 
-PROTON_PARTNER_NAME = "Proton"
-_PROTON_PARTNER: Optional[Partner] = None
-
-
-def get_proton_partner() -> Partner:
-    global _PROTON_PARTNER
-    if _PROTON_PARTNER is None:
-        partner = Partner.get_by(name=PROTON_PARTNER_NAME)
-        if partner is None:
-            raise ProtonPartnerNotSetUp
-        Session.expunge(partner)
-        _PROTON_PARTNER = partner
-    return _PROTON_PARTNER
-
-
-def is_proton_partner(partner: Partner) -> bool:
-    return partner.name == PROTON_PARTNER_NAME
-
-
-def perform_proton_account_unlink(current_user: User):
+
+def can_unlink_proton_account(user: User) -> bool:
+    return (user.flags & User.FLAG_CREATED_FROM_PARTNER) == 0
+
+
+def perform_proton_account_unlink(
+    current_user: User, skip_check: bool = False
+) -> None | str:
+    if not skip_check and not can_unlink_proton_account(current_user):
+        return None
     proton_partner = get_proton_partner()
     partner_user = PartnerUser.get_by(
         user_id=current_user.id, partner_id=proton_partner.id
@@ -38,6 +29,11 @@ def perform_proton_account_unlink(current_user: User):
         action=UserAuditLogAction.UnlinkAccount,
         message=f"User has unlinked the account (email={partner_user.partner_email} | external_user_id={partner_user.external_user_id})",
     )
+    EventDispatcher.send_event(
+        partner_user.user, EventContent(user_unlinked=UserUnlinked())
+    )
     PartnerUser.delete(partner_user.id)
+    external_user_id = partner_user.external_user_id
     Session.commit()
     agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
+    return external_user_id

app/app/request_utils.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+from random import randbytes
+from base64 import b64encode
+
+
+def generate_request_id() -> str:
+    return b64encode(randbytes(6)).decode()
@@ -1,6 +1,7 @@
 """Inspired from
 https://github.com/petermat/spamassassin_client
 """
+
 import logging
 import socket
 from io import BytesIO
@@ -14,9 +14,9 @@ from sqlalchemy.sql import Insert, text
 from app import s3, config
 from app.alias_utils import nb_email_log_for_mailbox
 from app.api.views.apple import verify_receipt
-from app.custom_domain_validation import CustomDomainValidation
+from app.custom_domain_validation import CustomDomainValidation, is_mx_equivalent
 from app.db import Session
-from app.dns_utils import get_mx_domains, is_mx_equivalent
+from app.dns_utils import get_mx_domains
 from app.email_utils import (
     send_email,
     send_trial_end_soon_email,
@@ -59,7 +59,7 @@ from app.models import (
     ApiToCookieToken,
 )
 from app.pgp_utils import load_public_key_and_check, PGPException
-from app.proton.utils import get_proton_partner
+from app.proton.proton_partner import get_proton_partner
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_email
 from server import create_light_app
@@ -14,8 +14,9 @@ jobs:
   - name: SimpleLogin Custom Domain check
     command: python /code/cron.py -j check_custom_domain
     shell: /bin/bash
-    schedule: "15 2 * * *"
+    schedule: "15 */4 * * *"
     captureStderr: true
+    concurrencyPolicy: Forbid
     onFailure:
       retry:
         maximumRetries: 10
@@ -26,7 +27,7 @@ jobs:
   - name: SimpleLogin HIBP check
     command: python /code/cron.py -j check_hibp
     shell: /bin/bash
-    schedule: "16 */4 * * *"
+    schedule: "13 */4 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
     onFailure:
@@ -369,8 +369,8 @@ For ex:
     "is_premium": false
   },
   {
-    "signed_suffix": ".yeah@sl.local.X6_7OQ.i8XL4xsMsn7dxDEWU8eF-Zap0qo",
-    "suffix": ".yeah@sl.local",
+    "signed_suffix": ".yeah@sl.lan.X6_7OQ.i8XL4xsMsn7dxDEWU8eF-Zap0qo",
+    "suffix": ".yeah@sl.lan",
     "is_custom": true,
     "is_premium": false
   }
@@ -465,7 +465,7 @@ Here's an example:
 {
   "creation_date": "2020-04-06 17:57:14+00:00",
   "creation_timestamp": 1586195834,
-  "email": "prefix1.cat@sl.local",
+  "email": "prefix1.cat@sl.lan",
   "name": "A Name",
   "enabled": true,
   "id": 3,
@@ -518,7 +518,7 @@ Alias info, use the same format as in /api/v2/aliases. For example:
 {
   "creation_date": "2020-04-06 17:57:14+00:00",
   "creation_timestamp": 1586195834,
-  "email": "prefix1.cat@sl.local",
+  "email": "prefix1.cat@sl.lan",
   "name": "A Name",
   "enabled": true,
   "id": 3,
@@ -608,7 +608,7 @@ If success, 200 with the list of activities, for example:
   "activities": [
     {
       "action": "reply",
-      "from": "yes_meo_chat@sl.local",
+      "from": "yes_meo_chat@sl.lan",
       "timestamp": 1580903760,
       "to": "marketing@example.com",
       "reverse_alias": "\"marketing at example.com\" <reply@a.b>",
@@ -703,7 +703,7 @@ Return 200 and `existed=true` if contact is already added.
   "creation_timestamp": 1584186761,
   "last_email_sent_date": null,
   "last_email_sent_timestamp": null,
-  "reverse_alias": "First Last first@example.com <ra+qytyzjhrumrreuszrbjxqjlkh@sl.local>",
+  "reverse_alias": "First Last first@example.com <ra+qytyzjhrumrreuszrbjxqjlkh@sl.lan>",
   "reverse_alias_address": "reply+bzvpazcdedcgcpztehxzgjgzmxskqa@sl.co",
   "existed": false
 }
@@ -992,7 +992,7 @@ Return user setting.
 {
   "alias_generator": "word",
   "notification": true,
-  "random_alias_default_domain": "sl.local",
+  "random_alias_default_domain": "sl.lan",
   "sender_format": "AT",
   "random_alias_suffix": "random_string"
 }
@@ -1029,7 +1029,7 @@ Return domains that user can use to create random alias
     "is_custom": false
   },
   {
-    "domain": "sl.local",
+    "domain": "sl.lan",
     "is_custom": false
   },
   {
@@ -30,6 +30,7 @@ It should contain the following info:
 
 
 """
+
 import argparse
 import email
 import time
@@ -149,6 +150,7 @@ from app.handler.unsubscribe_generator import UnsubscribeGenerator
 from app.handler.unsubscribe_handler import UnsubscribeHandler
 from app.log import LOG, set_message_id
 from app.mail_sender import sl_sendmail
+from app.mailbox_utils import get_mailbox_for_reply_phase
 from app.message_utils import message_to_bytes
 from app.models import (
     Alias,
@@ -166,13 +168,14 @@ from app.models import (
     VerpType,
     SLDomain,
 )
+from app.monitor_utils import send_version_event
 from app.pgp_utils import (
     PGPException,
     sign_data_with_pgpy,
     sign_data,
     load_public_key_and_check,
 )
-from app.utils import sanitize_email, canonicalize_email
+from app.utils import sanitize_email
 from init_app import load_pgp_public_keys
 from server import create_light_app
 
@@ -589,15 +592,42 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
             contact.alias
         )  # In case the Session was closed in the get_or_create we re-fetch the alias
 
-    reply_to_contact = None
+    reply_to_contact = []
     if msg[headers.REPLY_TO]:
-        reply_to = get_header_unicode(msg[headers.REPLY_TO])
-        LOG.d("Create or get contact for reply_to_header:%s", reply_to)
-        # ignore when reply-to = alias
-        if reply_to == alias.email:
-            LOG.i("Reply-to same as alias %s", alias)
-        else:
-            reply_to_contact = get_or_create_reply_to_contact(reply_to, alias, msg)
+        reply_to_header_contents = get_header_unicode(msg[headers.REPLY_TO])
+        if reply_to_header_contents:
+            LOG.d(
+                "Create or get contact for reply_to_header:%s", reply_to_header_contents
+            )
+            for reply_to in [
+                reply_to.strip()
+                for reply_to in reply_to_header_contents.split(",")
+                if reply_to.strip()
+            ]:
+                try:
+                    reply_to_name, reply_to_email = parse_full_address(reply_to)
+                except ValueError:
+                    LOG.d(f"Could not parse reply-to address {reply_to}")
+                    continue
+                if reply_to_email == alias.email:
+                    LOG.i("Reply-to same as alias %s", alias)
+                else:
+                    reply_contact = get_or_create_reply_to_contact(
+                        reply_to_email, alias, msg
+                    )
+                    if reply_contact:
+                        reply_to_contact.append(reply_contact)
+
+    if alias.user.delete_on is not None:
+        LOG.d(f"user {user} is pending to be deleted. Do not forward")
+        EmailLog.create(
+            contact_id=contact.id,
+            user_id=contact.user_id,
+            blocked=True,
+            alias_id=contact.alias_id,
+            commit=True,
+        )
+        return [(True, status.E502)]
 
     if not alias.enabled or contact.block_forward:
         LOG.d("%s is disabled, do not forward", alias)
@@ -689,7 +719,7 @@ def forward_email_to_mailbox(
     envelope,
     mailbox,
     user,
-    reply_to_contact: Optional[Contact],
+    reply_to_contacts: list[Contact],
 ) -> (bool, str):
     LOG.d("Forward %s -> %s -> %s", contact, alias, mailbox)
 
@@ -872,11 +902,13 @@ def forward_email_to_mailbox(
         add_or_replace_header(msg, "From", new_from_header)
         LOG.d("From header, new:%s, old:%s", new_from_header, old_from_header)
 
-    if reply_to_contact:
-        reply_to_header = msg[headers.REPLY_TO]
-        new_reply_to_header = reply_to_contact.new_addr()
+    if len(reply_to_contacts) > 0:
+        original_reply_to = get_header_unicode(msg[headers.REPLY_TO])
+        new_reply_to_header = ", ".join(
+            [reply_to_contact.new_addr() for reply_to_contact in reply_to_contacts][:5]
+        )
         add_or_replace_header(msg, "Reply-To", new_reply_to_header)
-        LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, reply_to_header)
+        LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, original_reply_to)
 
     # replace CC & To emails by reverse-alias for all emails that are not alias
     try:
@ -1008,7 +1040,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
|||||||
return False, status.E503
|
return False, status.E503
|
||||||
|
|
||||||
user = alias.user
|
user = alias.user
|
||||||
mail_from = envelope.mail_from
|
|
||||||
|
|
||||||
if not user.can_send_or_receive():
|
if not user.can_send_or_receive():
|
||||||
LOG.i(f"User {user} cannot send emails")
|
LOG.i(f"User {user} cannot send emails")
|
||||||
@ -1022,13 +1053,15 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
             return False, dmarc_delivery_status
 
     # Anti-spoofing
-    mailbox = get_mailbox_from_mail_from(mail_from, alias)
+    mailbox = get_mailbox_for_reply_phase(
+        envelope.mail_from, get_header_unicode(msg[headers.FROM]), alias
+    )
     if not mailbox:
         if alias.disable_email_spoofing_check:
             # ignore this error, use default alias mailbox
             LOG.w(
                 "ignore unknown sender to reverse-alias %s: %s -> %s",
-                mail_from,
+                envelope.mail_from,
                 alias,
                 contact,
             )
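get_mailbox_for_reply_phase now receives both the envelope sender and the header From, whereas the old helper (removed further down) only matched the envelope sender against the alias's mailboxes and their authorized addresses. A hedged sketch of that kind of lookup, using simplified stand-in types rather than the real Alias/Mailbox models:

from dataclasses import dataclass, field
from typing import Optional

@dataclass
class Mailbox:
    email: str
    authorized_addresses: list[str] = field(default_factory=list)

@dataclass
class Alias:
    email: str
    mailboxes: list[Mailbox] = field(default_factory=list)

def find_sending_mailbox(candidate: str, alias: Alias) -> Optional[Mailbox]:
    """Return the mailbox allowed to send as `alias`, or None if the sender is unknown."""
    for mailbox in alias.mailboxes:
        if mailbox.email == candidate:
            return mailbox
        if candidate in mailbox.authorized_addresses:
            return mailbox
    return None

alias = Alias("hello@simplelogin.co", [Mailbox("me@example.com", ["me@work.example"])])
print(find_sending_mailbox("me@work.example", alias).email)  # me@example.com
print(find_sending_mailbox("stranger@example.net", alias))   # None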
@ -1367,32 +1400,6 @@ def replace_original_message_id(alias: Alias, email_log: EmailLog, msg: Message)
     msg[headers.REFERENCES] = " ".join(new_message_ids)
 
 
-def get_mailbox_from_mail_from(mail_from: str, alias) -> Optional[Mailbox]:
-    """return the corresponding mailbox given the mail_from and alias
-    Usually the mail_from=mailbox.email but it can also be one of the authorized address
-    """
-
-    def __check(email_address: str, alias: Alias) -> Optional[Mailbox]:
-        for mailbox in alias.mailboxes:
-            if mailbox.email == email_address:
-                return mailbox
-
-            for authorized_address in mailbox.authorized_addresses:
-                if authorized_address.email == email_address:
-                    LOG.d(
-                        "Found an authorized address for %s %s %s",
-                        alias,
-                        mailbox,
-                        authorized_address,
-                    )
-                    return mailbox
-        return None
-
-    # We need to first check for the uncanonicalized version because we still have users in the db with the
-    # email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
-    return __check(mail_from, alias) or __check(canonicalize_email(mail_from), alias)
-
-
 def handle_unknown_mailbox(
     envelope, msg, reply_email: str, user: User, alias: Alias, contact: Contact
 ):
@ -1662,7 +1669,7 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
     )
     Notification.create(
         user_id=user.id,
-        title=f"Email cannot be sent to { contact.email } from your alias { alias.email }",
+        title=f"Email cannot be sent to {contact.email} from your alias {alias.email}",
         message=Notification.render(
             "notification/bounce-reply-phase.html",
             alias=alias,
@ -1675,7 +1682,7 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
         user,
         ALERT_BOUNCE_EMAIL_REPLY_PHASE,
         mailbox.email,
-        f"Email cannot be sent to { contact.email } from your alias { alias.email }",
+        f"Email cannot be sent to {contact.email} from your alias {alias.email}",
         render(
             "transactional/bounce/bounce-email-reply-phase.txt",
             user=user,
@ -2355,6 +2362,7 @@ class MailHandler:
             "Custom/nb_rcpt_tos", len(envelope.rcpt_tos)
         )
 
+        send_version_event("email_handler")
         with create_light_app().app_context():
             return_status = handle(envelope, msg)
         elapsed = time.time() - start
@ -2390,6 +2398,7 @@ def main(port: int):
 
     controller.start()
     LOG.d("Start mail controller %s %s", controller.hostname, controller.port)
+    send_version_event("email_handler")
 
     if LOAD_PGP_EMAIL_HANDLER:
         LOG.w("LOAD PGP keys")
@ -4,6 +4,7 @@ from sys import argv, exit
 
 from app.config import EVENT_LISTENER_DB_URI
 from app.log import LOG
+from app.monitor_utils import send_version_event
 from events import event_debugger
 from events.runner import Runner
 from events.event_source import DeadLetterEventSource, PostgresEventSource
@ -30,9 +31,11 @@ def main(mode: Mode, dry_run: bool, max_retries: int):
     if mode == Mode.DEAD_LETTER:
         LOG.i("Using DeadLetterEventSource")
         source = DeadLetterEventSource(max_retries)
+        service_name = "event_listener_dead_letter"
     elif mode == Mode.LISTENER:
         LOG.i("Using PostgresEventSource")
         source = PostgresEventSource(EVENT_LISTENER_DB_URI)
+        service_name = "event_listener"
     else:
         raise ValueError(f"Invalid mode: {mode}")
 
@ -43,7 +46,8 @@ def main(mode: Mode, dry_run: bool, max_retries: int):
         LOG.i("Starting with HttpEventSink")
         sink = HttpEventSink()
 
-    runner = Runner(source=source, sink=sink)
+    send_version_event(service_name)
+    runner = Runner(source=source, sink=sink, service_name=service_name)
     runner.run()
 
 
@ -4,20 +4,24 @@ import newrelic.agent
 from app.log import LOG
 from app.db import Session
 from app.models import SyncEvent
+from app.monitor_utils import send_version_event
 from events.event_sink import EventSink
 from events.event_source import EventSource
 
 
 class Runner:
-    def __init__(self, source: EventSource, sink: EventSink):
+    def __init__(self, source: EventSource, sink: EventSink, service_name: str = ""):
         self.__source = source
         self.__sink = sink
+        self.__service_name = service_name
 
     def run(self):
         self.__source.run(self.__on_event)
 
     @newrelic.agent.background_task()
     def __on_event(self, event: SyncEvent):
+        if self.__service_name:
+            send_version_event(self.__service_name)
         try:
             event_created_at = event.created_at
             start_time = arrow.now()
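With the extra constructor argument, the runner can emit a per-service heartbeat before handling each event. A rough usage sketch, with toy stand-ins for the real PostgresEventSource and HttpEventSink (the stand-in class names are illustrative only):

class ListSource:
    """Toy stand-in for an EventSource: feeds a fixed list of events to a callback."""
    def __init__(self, events):
        self.events = events

    def run(self, on_event):
        for event in self.events:
            on_event(event)


class PrintSink:
    """Toy stand-in for an EventSink."""
    def process(self, event):
        print("processing", event)


class MiniRunner:
    """Same shape as events.runner.Runner: an optional service_name used for a heartbeat."""
    def __init__(self, source, sink, service_name: str = ""):
        self.source = source
        self.sink = sink
        self.service_name = service_name

    def run(self):
        self.source.run(self.on_event)

    def on_event(self, event):
        if self.service_name:
            # the real runner calls send_version_event(self.__service_name) here
            print(f"heartbeat for {self.service_name}")
        self.sink.process(event)


MiniRunner(ListSource(["e1", "e2"]), PrintSink(), service_name="event_listener").run()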
@ -19,7 +19,7 @@ URL=http://localhost:7777
 NOT_SEND_EMAIL=true
 
 # domain used to create alias
-EMAIL_DOMAIN=sl.local
+EMAIL_DOMAIN=sl.lan
 
 # Allow SimpleLogin to enforce SPF by using the extra headers from postfix
 # ENFORCE_SPF=true
@ -37,18 +37,18 @@ EMAIL_DOMAIN=sl.local
 # FIRST_ALIAS_DOMAIN = another-domain.com
 
 # transactional email is sent from this email address
-SUPPORT_EMAIL=support@sl.local
+SUPPORT_EMAIL=support@sl.lan
 SUPPORT_NAME=Son from SimpleLogin
 
 # To use VERP
 # prefix must end with + and suffix must start with +
 # BOUNCE_PREFIX = "bounces+"
-# BOUNCE_SUFFIX = "+@sl.local"
+# BOUNCE_SUFFIX = "+@sl.lan"
 # same as BOUNCE_PREFIX but used for reply phase. Note it doesn't have the plus sign (+) at the end.
 # BOUNCE_PREFIX_FOR_REPLY_PHASE = "bounce_reply"
 
 # to receive general stats.
-# ADMIN_EMAIL=admin@sl.local
+# ADMIN_EMAIL=admin@sl.lan
 
 # Max number emails user can generate for free plan
 # Set to 5 by default
@ -6,7 +6,7 @@ from app.db import Session
 from app.log import LOG
 from app.models import Mailbox, Contact, SLDomain, Partner
 from app.pgp_utils import load_public_key
-from app.proton.utils import PROTON_PARTNER_NAME
+from app.proton.proton_partner import PROTON_PARTNER_NAME
 from server import create_light_app
 
 
@ -56,14 +56,15 @@ def add_sl_domains():
     Session.commit()
 
 
-def add_proton_partner():
+def add_proton_partner() -> Partner:
     proton_partner = Partner.get_by(name=PROTON_PARTNER_NAME)
     if not proton_partner:
-        Partner.create(
+        proton_partner = Partner.create(
             name=PROTON_PARTNER_NAME,
             contact_email="simplelogin@protonmail.com",
         )
         Session.commit()
+    return proton_partner
 
 
 if __name__ == "__main__":
@ -2,13 +2,18 @@
 Run scheduled jobs.
 Not meant for running job at precise time (+- 1h)
 """
 
 import time
 from typing import List, Optional
 
 import arrow
+import newrelic.agent
+from sqlalchemy.orm import Query
+from sqlalchemy.orm.exc import ObjectDeletedError
 from sqlalchemy.sql.expression import or_, and_
 
 from app import config
+from app.constants import JobType
 from app.db import Session
 from app.email_utils import (
     send_email,
@ -21,9 +26,13 @@ from app.jobs.export_user_data_job import ExportUserDataJob
 from app.jobs.send_event_job import SendEventToWebhookJob
 from app.log import LOG
 from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
+from app.monitor_utils import send_version_event
 from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
+from events.event_sink import HttpEventSink
 from server import create_light_app
 
+_MAX_JOBS_PER_BATCH = 50
+
 
 def onboarding_send_from_alias(user):
     comm_email, unsubscribe_link, via_email = user.get_communication_email()
@ -164,6 +173,8 @@ def delete_mailbox_job(job: Job):
     Session.commit()
     LOG.d("Mailbox %s %s deleted", mailbox_id, mailbox_email)
 
+    if not job.payload.get("send_mail", True):
+        return
     if alias_transferred_to:
         send_email(
             user.email,
@ -187,7 +198,8 @@ SimpleLogin team.
 
 
 def process_job(job: Job):
-    if job.name == config.JOB_ONBOARDING_1:
+    send_version_event("job_runner")
+    if job.name == JobType.ONBOARDING_1.value:
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
 
@ -196,7 +208,7 @@ def process_job(job: Job):
         if user and user.notification and user.activated:
             LOG.d("send onboarding send-from-alias email to user %s", user)
             onboarding_send_from_alias(user)
-    elif job.name == config.JOB_ONBOARDING_2:
+    elif job.name == JobType.ONBOARDING_2.value:
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
 
@ -205,7 +217,7 @@ def process_job(job: Job):
         if user and user.notification and user.activated:
             LOG.d("send onboarding mailbox email to user %s", user)
             onboarding_mailbox(user)
-    elif job.name == config.JOB_ONBOARDING_4:
+    elif job.name == JobType.ONBOARDING_4.value:
         user_id = job.payload.get("user_id")
         user: User = User.get(user_id)
 
@ -220,11 +232,11 @@ def process_job(job: Job):
             LOG.d("send onboarding pgp email to user %s", user)
             onboarding_pgp(user)
 
-    elif job.name == config.JOB_BATCH_IMPORT:
+    elif job.name == JobType.BATCH_IMPORT.value:
         batch_import_id = job.payload.get("batch_import_id")
         batch_import = BatchImport.get(batch_import_id)
         handle_batch_import(batch_import)
-    elif job.name == config.JOB_DELETE_ACCOUNT:
+    elif job.name == JobType.DELETE_ACCOUNT.value:
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
 
@ -243,10 +255,10 @@ def process_job(job: Job):
             )
             User.delete(user.id)
             Session.commit()
-    elif job.name == config.JOB_DELETE_MAILBOX:
+    elif job.name == JobType.DELETE_MAILBOX.value:
         delete_mailbox_job(job)
 
-    elif job.name == config.JOB_DELETE_DOMAIN:
+    elif job.name == JobType.DELETE_DOMAIN.value:
         custom_domain_id = job.payload.get("custom_domain_id")
         custom_domain: Optional[CustomDomain] = CustomDomain.get(custom_domain_id)
         if not custom_domain:
@ -283,17 +295,17 @@ def process_job(job: Job):
             """,
             retries=3,
         )
-    elif job.name == config.JOB_SEND_USER_REPORT:
+    elif job.name == JobType.SEND_USER_REPORT.value:
         export_job = ExportUserDataJob.create_from_job(job)
         if export_job:
             export_job.run()
-    elif job.name == config.JOB_SEND_PROTON_WELCOME_1:
+    elif job.name == JobType.SEND_PROTON_WELCOME_1.value:
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
         if user and user.activated:
             LOG.d("Send proton welcome email to user %s", user)
             welcome_proton(user)
-    elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
+    elif job.name == JobType.SEND_ALIAS_CREATION_EVENTS.value:
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
         if user and user.activated:
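The dispatch above now compares job.name against JobType enum values instead of loose config constants. A minimal sketch of the pattern (only a subset of members is shown and the string values are illustrative; the real enum lives in app.constants):

from enum import Enum

class JobType(Enum):
    # subset of the job names visible in this diff; values here are illustrative
    ONBOARDING_1 = "onboarding_1"
    DELETE_MAILBOX = "delete_mailbox"
    SEND_EVENT_TO_WEBHOOK = "send_event_to_webhook"

def dispatch(job_name: str) -> str:
    if job_name == JobType.ONBOARDING_1.value:
        return "send onboarding email"
    elif job_name == JobType.DELETE_MAILBOX.value:
        return "delete mailbox"
    elif job_name == JobType.SEND_EVENT_TO_WEBHOOK.value:
        return "send webhook event"
    return "unknown job"

print(dispatch(JobType.DELETE_MAILBOX.value))  # delete mailbox
print(dispatch("bogus"))                       # unknown job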
@ -301,52 +313,111 @@ def process_job(job: Job):
             send_alias_creation_events_for_user(
                 user, dispatcher=PostgresDispatcher.get()
             )
-    elif job.name == config.JOB_SEND_EVENT_TO_WEBHOOK:
+    elif job.name == JobType.SEND_EVENT_TO_WEBHOOK.value:
         send_job = SendEventToWebhookJob.create_from_job(job)
         if send_job:
-            send_job.run()
+            send_job.run(HttpEventSink())
     else:
         LOG.e("Unknown job name %s", job.name)
 
 
-def get_jobs_to_run() -> List[Job]:
+def get_jobs_to_run_query(taken_before_time: arrow.Arrow) -> Query:
     # Get jobs that match all conditions:
     #  - Job.state == ready OR (Job.state == taken AND Job.taken_at < now - 30 mins AND Job.attempts < 5)
     #  - Job.run_at is Null OR Job.run_at < now + 10 mins
-    taken_at_earliest = arrow.now().shift(minutes=-config.JOB_TAKEN_RETRY_WAIT_MINS)
     run_at_earliest = arrow.now().shift(minutes=+10)
-    query = Job.filter(
+    return Job.filter(
         and_(
             or_(
                 Job.state == JobState.ready.value,
                 and_(
                     Job.state == JobState.taken.value,
-                    Job.taken_at < taken_at_earliest,
+                    Job.taken_at < taken_before_time,
                     Job.attempts < config.JOB_MAX_ATTEMPTS,
                 ),
             ),
             or_(Job.run_at.is_(None), and_(Job.run_at <= run_at_earliest)),
         )
     )
-    return query.all()
 
+
+def get_jobs_to_run(taken_before_time: arrow.Arrow) -> List[Job]:
+    query = get_jobs_to_run_query(taken_before_time)
+    return (
+        query.order_by(Job.priority.desc())
+        .order_by(Job.run_at.asc())
+        .limit(_MAX_JOBS_PER_BATCH)
+        .all()
+    )
+
+
+def take_job(job: Job, taken_before_time: arrow.Arrow) -> bool:
+    sql = """
+        UPDATE job
+        SET
+            taken_at = :taken_time,
+            attempts = attempts + 1,
+            state = :taken_state
+        WHERE id = :job_id
+          AND (state = :ready_state OR (state=:taken_state AND taken_at < :taken_before_time))
+    """
+    args = {
+        "taken_time": arrow.now().datetime,
+        "job_id": job.id,
+        "ready_state": JobState.ready.value,
+        "taken_state": JobState.taken.value,
+        "taken_before_time": taken_before_time.datetime,
+    }
+    try:
+        res = Session.execute(sql, args)
+        Session.commit()
+    except ObjectDeletedError:
+        return False
+
+    return res.rowcount > 0
+
 
 if __name__ == "__main__":
+    send_version_event("job_runner")
     while True:
         # wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc
         with create_light_app().app_context():
-            for job in get_jobs_to_run():
+            taken_before_time = arrow.now().shift(
+                minutes=-config.JOB_TAKEN_RETRY_WAIT_MINS
+            )
+
+            jobs_done = 0
+            for job in get_jobs_to_run(taken_before_time):
+                if not take_job(job, taken_before_time):
+                    continue
                 LOG.d("Take job %s", job)
 
-                # mark the job as taken, whether it will be executed successfully or not
-                job.taken = True
-                job.taken_at = arrow.now()
-                job.state = JobState.taken.value
-                job.attempts += 1
-                Session.commit()
-                process_job(job)
+                try:
+                    newrelic.agent.record_custom_event("ProcessJob", {"job": job.name})
+                    process_job(job)
+                    job_result = "success"
 
                     job.state = JobState.done.value
+                    jobs_done += 1
+                except Exception as e:
+                    LOG.warn(f"Error processing job (id={job.id} name={job.name}): {e}")
+
+                    # Increment manually, as the attempts increment is done by the take_job but not
+                    # updated in our instance
+                    job_attempts = job.attempts + 1
+                    if job_attempts >= config.JOB_MAX_ATTEMPTS:
+                        LOG.warn(
+                            f"Marking job (id={job.id} name={job.name} attempts={job_attempts}) as ERROR"
+                        )
+                        job.state = JobState.error.value
+                        job_result = "error"
+                    else:
+                        job_result = "retry"
+
+                newrelic.agent.record_custom_event(
+                    "JobProcessed", {"job": job.name, "result": job_result}
+                )
                 Session.commit()
 
-        time.sleep(10)
+        if jobs_done == 0:
+            time.sleep(10)
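The new take_job uses a single conditional UPDATE so two job runners cannot both claim the same row: the claim only succeeds while the job is still ready (or was taken long enough ago to retry), and rowcount tells the caller whether it won. A self-contained sketch of the same claim pattern against SQLite (schema and values are invented for illustration; the real code runs its UPDATE through SQLAlchemy's Session.execute against Postgres):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE job (id INTEGER PRIMARY KEY, state TEXT, attempts INTEGER)")
conn.execute("INSERT INTO job VALUES (1, 'ready', 0)")

def take_job(job_id: int) -> bool:
    # claim the job only if it is still ready; bump attempts in the same statement
    cur = conn.execute(
        "UPDATE job SET state = 'taken', attempts = attempts + 1 "
        "WHERE id = ? AND state = 'ready'",
        (job_id,),
    )
    conn.commit()
    # rowcount > 0 means this worker won the claim
    return cur.rowcount > 0

print(take_job(1))  # True: first claim succeeds
print(take_job(1))  # False: the job is already taken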
@ -1,6 +1,4 @@
 abacus
-abdomen
-abdominal
 abide
 abiding
 ability
@ -1031,7 +1029,6 @@ chosen
 chowder
 chowtime
 chrome
-chubby
 chuck
 chug
 chummy
@ -2041,8 +2038,6 @@ dwindling
 dynamic
 dynamite
 dynasty
-dyslexia
-dyslexic
 each
 eagle
 earache
@ -2081,7 +2076,6 @@ eatery
 eating
 eats
 ebay
-ebony
 ebook
 ecard
 eccentric
@ -2375,8 +2369,6 @@ exclude
 excluding
 exclusion
 exclusive
-excretion
-excretory
 excursion
 excusable
 excusably
@ -2396,8 +2388,6 @@ existing
 exit
 exodus
 exonerate
-exorcism
-exorcist
 expand
 expanse
 expansion
@ -2483,7 +2473,6 @@ fanning
 fantasize
 fantastic
 fantasy
-fascism
 fastball
 faster
 fasting
@ -3028,7 +3017,6 @@ guiding
 guileless
 guise
 gulf
-gullible
 gully
 gulp
 gumball
@ -3040,10 +3028,6 @@ gurgle
 gurgling
 guru
 gush
-gusto
-gusty
-gutless
-guts
 gutter
 guy
 guzzler
@ -3242,8 +3226,6 @@ humble
 humbling
 humbly
 humid
-humiliate
-humility
 humming
 hummus
 humongous
@ -3271,7 +3253,6 @@ hurray
 hurricane
 hurried
 hurry
-hurt
 husband
 hush
 husked
@ -3292,8 +3273,6 @@ hypnotic
 hypnotism
 hypnotist
 hypnotize
-hypocrisy
-hypocrite
 ibuprofen
 ice
 iciness
@ -3323,7 +3302,6 @@ image
 imaginary
 imagines
 imaging
-imbecile
 imitate
 imitation
 immerse
@ -3746,7 +3724,6 @@ machine
 machinist
 magazine
 magenta
-maggot
 magical
 magician
 magma
@ -3968,8 +3945,6 @@ multitude
 mumble
 mumbling
 mumbo
-mummified
-mummify
 mumps
 munchkin
 mundane
@ -4022,8 +3997,6 @@ napped
 napping
 nappy
 narrow
-nastily
-nastiness
 national
 native
 nativity
@ -4446,7 +4419,6 @@ pasta
 pasted
 pastel
 pastime
-pastor
 pastrami
 pasture
 pasty
@ -4458,7 +4430,6 @@ path
 patience
 patient
 patio
-patriarch
 patriot
 patrol
 patronage
@ -4549,7 +4520,6 @@ pettiness
 petty
 petunia
 phantom
-phobia
 phoenix
 phonebook
 phoney
@ -4608,7 +4578,6 @@ plot
 plow
 ploy
 pluck
-plug
 plunder
 plunging
 plural
@ -4875,7 +4844,6 @@ pupil
 puppet
 puppy
 purchase
-pureblood
 purebred
 purely
 pureness
@ -5047,7 +5015,6 @@ recharger
 recipient
 recital
 recite
-reckless
 reclaim
 recliner
 reclining
@ -5440,7 +5407,6 @@ rubdown
 ruby
 ruckus
 rudder
-rug
 ruined
 rule
 rumble
@ -5448,7 +5414,6 @@ rumbling
 rummage
 rumor
 runaround
-rundown
 runner
 running
 runny
@ -5518,7 +5483,6 @@ sandpaper
 sandpit
 sandstone
 sandstorm
-sandworm
 sandy
 sanitary
 sanitizer
@ -5541,7 +5505,6 @@ satisfy
 saturate
 saturday
 sauciness
-saucy
 sauna
 savage
 savanna
@ -5552,7 +5515,6 @@ savor
 saxophone
 say
 scabbed
-scabby
 scalded
 scalding
 scale
@ -5587,7 +5549,6 @@ science
 scientist
 scion
 scoff
-scolding
 scone
 scoop
 scooter
@ -5651,8 +5612,6 @@ sedate
 sedation
 sedative
 sediment
-seduce
-seducing
 segment
 seismic
 seizing
@ -5899,7 +5858,6 @@ skimpily
 skincare
 skinless
 skinning
-skinny
 skintight
 skipper
 skipping
@ -6248,17 +6206,12 @@ stifle
 stifling
 stillness
 stilt
-stimulant
-stimulate
-stimuli
 stimulus
 stinger
 stingily
 stinging
 stingray
 stingy
-stinking
-stinky
 stipend
 stipulate
 stir
@ -6866,7 +6819,6 @@ unbent
 unbiased
 unbitten
 unblended
-unblessed
 unblock
 unbolted
 unbounded
@ -6947,7 +6899,6 @@ undertone
 undertook
 undertow
 underuse
-underwear
 underwent
 underwire
 undesired
@ -7031,7 +6982,6 @@ uninsured
 uninvited
 union
 uniquely
-unisexual
 unison
 unissued
 unit
@ -7492,8 +7442,6 @@ wheat
 whenever
 whiff
 whimsical
-whinny
-whiny
 whisking
 whoever
 whole
@ -7599,7 +7547,6 @@ wrongness
 wrought
 xbox
 xerox
-yahoo
 yam
 yanking
 yapping
35
app/migrations/versions/2024_112619_085f77996ce3_.py
Normal file
@ -0,0 +1,35 @@
+"""empty message
+
+Revision ID: 085f77996ce3
+Revises: 0f3ee15b0014
+Create Date: 2024-11-26 19:20:32.227899
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '085f77996ce3'
+down_revision = '0f3ee15b0014'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('partner_subscription', sa.Column('lifetime', sa.Boolean(), server_default='0', nullable=False))
+    op.alter_column('partner_subscription', 'end_at',
+               existing_type=postgresql.TIMESTAMP(),
+               nullable=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('partner_subscription', 'end_at',
+               existing_type=postgresql.TIMESTAMP(),
+               nullable=False)
+    op.drop_column('partner_subscription', 'lifetime')
+    # ### end Alembic commands ###
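With this migration, partner_subscription gains a lifetime boolean and end_at becomes nullable, so code that decides whether a partner subscription is still active has to treat lifetime rows as never expiring. A hedged sketch of that check, using a plain dataclass stand-in rather than the real PartnerSubscription model:

from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Optional

@dataclass
class PartnerSubscriptionRow:
    lifetime: bool = False              # new column, server default '0'
    end_at: Optional[datetime] = None   # now nullable

def is_active(sub: PartnerSubscriptionRow, now: Optional[datetime] = None) -> bool:
    now = now or datetime.now(timezone.utc)
    if sub.lifetime:
        # lifetime subscriptions never expire, even with end_at unset
        return True
    return sub.end_at is not None and sub.end_at > now

print(is_active(PartnerSubscriptionRow(lifetime=True)))  # True
print(is_active(PartnerSubscriptionRow(end_at=datetime(2020, 1, 1, tzinfo=timezone.utc))))  # False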
@ -0,0 +1,91 @@
+"""index cleanup
+
+Revision ID: d3ff8848c930
+Revises: 085f77996ce3
+Create Date: 2025-01-30 15:00:02.995813
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "d3ff8848c930"
+down_revision = "085f77996ce3"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    with op.get_context().autocommit_block():
+        op.drop_index("ix_alias_hibp_alias_id", table_name="alias_hibp")
+        op.drop_index("ix_alias_mailbox_alias_id", table_name="alias_mailbox")
+        op.drop_index("ix_alias_used_on_alias_id", table_name="alias_used_on")
+        op.drop_index("ix_api_key_code", table_name="api_key")
+        op.drop_index(
+            "ix_auto_create_rule_custom_domain_id", table_name="auto_create_rule"
+        )
+        op.drop_index("ix_contact_alias_id", table_name="contact")
+        op.create_index(
+            "ix_email_log_user_id_email_log_id",
+            "email_log",
+            ["user_id", "id"],
+            unique=False,
+        )
+        op.drop_index("ix_job_state", table_name="job")
+        op.create_index(
+            "ix_mailbox_email_trgm_idx",
+            "mailbox",
+            ["email"],
+            unique=False,
+            postgresql_ops={"email": "gin_trgm_ops"},
+            postgresql_using="gin",
+        )
+        op.drop_index("ix_partner_user_partner_id", table_name="partner_user")
+        op.create_index(
+            "ix_sent_alert_alert_type", "sent_alert", ["alert_type"], unique=False
+        )
+        op.create_index(
+            "ix_sent_alert_to_email", "sent_alert", ["to_email"], unique=False
+        )
+        op.create_index(
+            "idx_users_email_trgm",
+            "users",
+            ["email"],
+            unique=False,
+            postgresql_ops={"email": "gin_trgm_ops"},
+            postgresql_using="gin",
+        )
+        op.drop_index("ix_users_activated", table_name="users")
+        op.drop_index("ix_mailbox_user_id", table_name="users")
+
+
+def downgrade():
+    with op.get_context().autocommit_block():
+        op.create_index("ix_users_activated", "users", ["activated"], unique=False)
+        op.drop_index("idx_users_email_trgm", table_name="users")
+        op.drop_index("ix_sent_alert_to_email", table_name="sent_alert")
+        op.drop_index("ix_sent_alert_alert_type", table_name="sent_alert")
+        op.create_index(
+            "ix_partner_user_partner_id", "partner_user", ["partner_id"], unique=False
+        )
+        op.drop_index("ix_mailbox_email_trgm_idx", table_name="mailbox")
+        op.create_index("ix_job_state", "job", ["state"], unique=False)
+        op.drop_index("ix_email_log_user_id_email_log_id", table_name="email_log")
+        op.create_index("ix_contact_alias_id", "contact", ["alias_id"], unique=False)
+        op.create_index(
+            "ix_auto_create_rule_custom_domain_id",
+            "auto_create_rule",
+            ["custom_domain_id"],
+            unique=False,
+        )
+        op.create_index("ix_api_key_code", "api_key", ["code"], unique=False)
+        op.create_index(
+            "ix_alias_used_on_alias_id", "alias_used_on", ["alias_id"], unique=False
+        )
+        op.create_index(
+            "ix_alias_mailbox_alias_id", "alias_mailbox", ["alias_id"], unique=False
+        )
+        op.create_index(
+            "ix_alias_hibp_alias_id", "alias_hibp", ["alias_id"], unique=False
+        )
+        op.create_index("ix_mailbox_user_id", "users", ["user_id"], unique=False)
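The two gin_trgm_ops indexes created above exist to speed up substring searches on mailbox.email and users.email: a GIN trigram index lets Postgres serve ILIKE '%fragment%' filters without a sequential scan (it requires the pg_trgm extension). A sketch of the kind of query that benefits, written with SQLAlchemy in the style the codebase already uses; it assumes app.models.Mailbox is the mapped class behind the mailbox table:

# Assumes the pg_trgm extension is installed and ix_mailbox_email_trgm_idx exists,
# as created in the migration above.
from app.db import Session
from app.models import Mailbox

def search_mailboxes(fragment: str, limit: int = 20):
    # ILIKE '%fragment%' can use the GIN trigram index instead of scanning the table
    return (
        Session.query(Mailbox)
        .filter(Mailbox.email.ilike(f"%{fragment}%"))
        .limit(limit)
        .all()
    )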
@ -0,0 +1,23 @@
+"""index cleanup
+
+Revision ID: 97edba8794f8
+Revises: d3ff8848c930
+Create Date: 2025-01-31 14:42:22.590597
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = '97edba8794f8'
+down_revision = 'd3ff8848c930'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.drop_index('ix_email_log_user_id', table_name='email_log')
+
+
+def downgrade():
+    op.create_index('ix_email_log_user_id', 'email_log', ['user_id'], unique=False)
@ -0,0 +1,27 @@
+"""contact index
+
+Revision ID: 20e7d3ca289a
+Revises: 97edba8794f8
+Create Date: 2025-02-03 16:52:06.775032
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = '20e7d3ca289a'
+down_revision = '97edba8794f8'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    with op.get_context().autocommit_block():
+        op.create_index('ix_contact_user_id_id', 'contact', ['user_id', 'id'], unique=False)
+        op.drop_index('ix_contact_user_id', table_name='contact')
+
+
+def downgrade():
+    with op.get_context().autocommit_block():
+        op.create_index('ix_contact_user_id', 'contact', ['user_id'], unique=False)
+        op.drop_index('ix_contact_user_id_id', table_name='contact')
@ -0,0 +1,31 @@
+"""job priorities
+
+Revision ID: fd79503179dd
+Revises: 20e7d3ca289a
+Create Date: 2025-02-25 15:39:24.833973
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'fd79503179dd'
+down_revision = '20e7d3ca289a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    with op.get_context().autocommit_block():
+        op.add_column('job', sa.Column('priority', sa.Integer(), server_default='50', nullable=False))
+        op.create_index('ix_state_run_at_taken_at_priority', 'job', ['state', 'run_at', 'taken_at', 'priority'], unique=False, postgresql_concurrently=True)
+        op.drop_index('ix_state_run_at_taken_at', table_name='job', postgresql_concurrently=True)
+
+
+def downgrade():
+    with op.get_context().autocommit_block():
+        op.drop_index('ix_state_run_at_taken_at_priority', table_name='job', postgresql_concurrently=True)
+        op.create_index('ix_state_run_at_taken_at', 'job', ['state', 'run_at', 'taken_at'], unique=False, postgresql_concurrently=True)
+        op.drop_column('job', 'priority')
@ -7,8 +7,11 @@ from typing import List, Dict
 import arrow
 import newrelic.agent
 
+from app.models import JobState
+from app.config import JOB_MAX_ATTEMPTS, JOB_TAKEN_RETRY_WAIT_MINS
 from app.db import Session
 from app.log import LOG
+from job_runner import get_jobs_to_run_query
 from monitor.metric_exporter import MetricExporter
 
 # the number of consecutive fails
@ -154,6 +157,38 @@ def log_failed_events():
     newrelic.agent.record_custom_metric("Custom/sync_events_failed", failed_events)
 
 
+@newrelic.agent.background_task()
+def log_jobs_to_run():
+    taken_before_time = arrow.now().shift(minutes=-JOB_TAKEN_RETRY_WAIT_MINS)
+    query = get_jobs_to_run_query(taken_before_time)
+    count = query.count()
+    LOG.d(f"Pending jobs to run: {count}")
+    newrelic.agent.record_custom_metric("Custom/jobs_to_run", count)
+
+
+@newrelic.agent.background_task()
+def log_failed_jobs():
+    r = Session.execute(
+        """
+        SELECT COUNT(*)
+        FROM job
+        WHERE (
+            state = :error_state
+            OR (state = :taken_state AND attempts >= :max_attempts)
+        )
+        """,
+        {
+            "error_state": JobState.error.value,
+            "taken_state": JobState.taken.value,
+            "max_attempts": JOB_MAX_ATTEMPTS,
+        },
+    )
+    failed_jobs = list(r)[0][0]
+
+    LOG.d(f"Failed jobs: {failed_jobs}")
+    newrelic.agent.record_custom_metric("Custom/failed_jobs", failed_jobs)
+
+
 if __name__ == "__main__":
     exporter = MetricExporter(get_newrelic_license())
     while True:
@ -163,6 +198,8 @@ if __name__ == "__main__":
             log_events_pending_dead_letter()
             log_failed_events()
             log_nb_db_connection_by_app_name()
+            log_jobs_to_run()
+            log_failed_jobs()
             Session.close()
 
         exporter.run()
@ -5,6 +5,7 @@ The step-to-step guide can be found on https://simplelogin.io/docs/siwsl/app/
 This example is based on
 https://requests-oauthlib.readthedocs.io/en/latest/examples/real_world_example.html
 """
+
 import os
 
 from flask import Flask, request, redirect, session, url_for
@ -34,4 +34,4 @@ for i in range(tests):
 
 end = time.time()
 time_taken = end - start
-print(f"Took {time_taken} -> {time_taken/tests} per test")
+print(f"Took {time_taken} -> {time_taken / tests} per test")
123
app/oneshot/schedule_sync_user_job.py
Normal file
@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+
+import argparse
+import sys
+import time
+
+from sqlalchemy import func
+from typing import Optional
+
+from app.jobs.send_event_job import SendEventToWebhookJob
+from app.db import Session
+from app.events.generated.event_pb2 import UserPlanChanged, EventContent
+from app.models import PartnerUser, User
+
+
+def process(start_pu_id: int, end_pu_id: int, step: int, only_lifetime: bool):
+    print(
+        f"Checking partner user {start_pu_id} to {end_pu_id} (step={step}) (only_lifetime={only_lifetime})"
+    )
+    start_time = time.time()
+    with_lifetime = 0
+    with_plan = 0
+    with_free = 0
+    for batch_start in range(start_pu_id, end_pu_id, step):
+        query = (
+            Session.query(User)
+            .join(PartnerUser, PartnerUser.user_id == User.id)
+            .filter(PartnerUser.id >= batch_start, PartnerUser.id < batch_start + step)
+        )
+        if only_lifetime:
+            query = query.filter(
+                User.lifetime == True,  # noqa :E712
+            )
+        users = query.all()
+        for user in users:
+            # Just in case the == True cond is wonky
+            if user.lifetime:
+                event = UserPlanChanged(lifetime=True)
+                with_lifetime += 1
+            else:
+                plan_end = user.get_active_subscription_end(
+                    include_partner_subscription=False
+                )
+                if plan_end:
+                    event = UserPlanChanged(plan_end_time=plan_end.timestamp)
+                    with_plan += 1
+                else:
+                    event = UserPlanChanged()
+                    with_free += 1
+            job = SendEventToWebhookJob(
+                user=user, event=EventContent(user_plan_change=event)
+            )
+            job.store_job_in_db(run_at=None, commit=False)
+        Session.flush()
+        Session.commit()
+        elapsed = time.time() - start_time
+        last_batch_id = batch_start + step
+        time_per_user = elapsed / last_batch_id
+        remaining = end_pu_id - last_batch_id
+        time_remaining = remaining / time_per_user
+        hours_remaining = time_remaining / 60.0
+        print(
+            f"PartnerUser {batch_start}/{end_pu_id} lifetime {with_lifetime} paid {with_plan} free {with_free} {hours_remaining:.2f} mins remaining"
+        )
+    print(f"Sent lifetime {with_lifetime} paid {with_plan} free {with_free}")
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        prog="Schedule Sync User Jobs", description="Create jobs to sync users"
+    )
+    parser.add_argument(
+        "-s", "--start_pu_id", default=0, type=int, help="Initial partner_user_id"
+    )
+    parser.add_argument(
+        "-e", "--end_pu_id", default=0, type=int, help="Last partner_user_id"
+    )
+    parser.add_argument("-t", "--step", default=10000, type=int, help="Step to use")
+    parser.add_argument("-u", "--user", default="", type=str, help="User to sync")
+    parser.add_argument(
+        "-l", "--lifetime", action="store_true", help="Only sync lifetime users"
+    )
+
+    args = parser.parse_args()
+    start_pu_id = args.start_pu_id
+    end_pu_id = args.end_pu_id
+    user_id = args.user
+    only_lifetime = args.lifetime
+    step = args.step
+
+    if not end_pu_id:
+        end_pu_id = Session.query(func.max(PartnerUser.id)).scalar()
+
+    if user_id:
+        try:
+            user_id = int(user_id)
+        except ValueError:
+            user = User.get_by(email=user_id)
+            if not user:
+                print(f"User {user_id} not found")
+                sys.exit(1)
+            user_id = user.id
+        print(f"Limiting to user {user_id}")
+        partner_user: Optional[PartnerUser] = PartnerUser.get_by(user_id=user_id)
+        if not partner_user:
+            print(f"Could not find PartnerUser for user_id={user_id}")
+            sys.exit(1)
+
+        # So we only have one loop
+        step = 1
+        start_pu_id = partner_user.id
+        end_pu_id = partner_user.id
+
+    process(
+        start_pu_id=start_pu_id,
+        end_pu_id=end_pu_id,
+        step=step,
+        only_lifetime=only_lifetime,
+    )
+
+
+if __name__ == "__main__":
+    main()
@ -1,14 +1,14 @@
 #!/usr/bin/env python3
 import argparse
+import sys
 import time
 
-import arrow
 from sqlalchemy import func
 
+from app.db import Session
 from app.events.event_dispatcher import EventDispatcher
 from app.events.generated.event_pb2 import UserPlanChanged, EventContent
 from app.models import PartnerUser, User
-from app.db import Session
 
 parser = argparse.ArgumentParser(
     prog="Backfill alias", description="Send lifetime users to proton"
@ -19,34 +19,69 @@ parser.add_argument(
 parser.add_argument(
     "-e", "--end_pu_id", default=0, type=int, help="Last partner_user_id"
 )
+parser.add_argument("-t", "--step", default=10000, type=int, help="Step to use")
+parser.add_argument("-u", "--user", default="", type=str, help="User to sync")
+parser.add_argument(
+    "-l", "--lifetime", action="store_true", help="Only sync lifetime users"
+)
+
 args = parser.parse_args()
 pu_id_start = args.start_pu_id
 max_pu_id = args.end_pu_id
+user_id = args.user
+only_lifetime = args.lifetime
+step = args.step
+
 if max_pu_id == 0:
     max_pu_id = Session.query(func.max(PartnerUser.id)).scalar()
 
+if user_id:
+    try:
+        user_id = int(user_id)
+    except ValueError:
+        user = User.get_by(email=user_id)
+        if not user:
+            print(f"User {user_id} not found")
+            sys.exit(1)
+        print(f"Limiting to user {user_id}")
+        user_id = user.id
+    # So we only have one loop
+    step = max_pu_id
+
 print(f"Checking partner user {pu_id_start} to {max_pu_id}")
-step = 1000
 done = 0
 start_time = time.time()
 with_lifetime = 0
+with_plan = 0
+with_free = 0
 for batch_start in range(pu_id_start, max_pu_id, step):
-    users = (
-        Session.query(User)
-        .join(PartnerUser, PartnerUser.user_id == User.id)
-        .filter(
-            PartnerUser.id >= batch_start,
-            PartnerUser.id < batch_start + step,
+    query = Session.query(User).join(PartnerUser, PartnerUser.user_id == User.id)
+    if user_id:
+        query = query.filter(User.id == user_id)
+    else:
+        query = query.filter(
+            PartnerUser.id >= batch_start, PartnerUser.id < batch_start + step
+        )
+    if only_lifetime:
+        query = query.filter(
             User.lifetime == True,  # noqa :E712
         )
-    ).all()
+    users = query.all()
     for user in users:
         # Just in case the == True cond is wonky
-        if not user.lifetime:
-            continue
-        with_lifetime += 1
-        event = UserPlanChanged(plan_end_time=arrow.get("2038-01-01").timestamp)
+        if user.lifetime:
+            event = UserPlanChanged(lifetime=True)
+            with_lifetime += 1
+        else:
+            plan_end = user.get_active_subscription_end(
+                include_partner_subscription=False
+            )
+            if plan_end:
+                event = UserPlanChanged(plan_end_time=plan_end.timestamp)
+                with_plan += 1
+            else:
+                event = UserPlanChanged()
+                with_free += 1
         EventDispatcher.send_event(user, EventContent(user_plan_change=event))
         Session.flush()
         Session.commit()
@ -57,6 +92,6 @@ for batch_start in range(pu_id_start, max_pu_id, step):
     time_remaining = remaining / time_per_alias
     hours_remaining = time_remaining / 60.0
     print(
-        f"\PartnerUser {batch_start}/{max_pu_id} {with_lifetime} {hours_remaining:.2f} mins remaining"
+        f"PartnerUser {batch_start}/{max_pu_id} lifetime {with_lifetime} paid {with_plan} free {with_free} {hours_remaining:.2f} mins remaining"
     )
-print(f"With SL lifetime {with_lifetime}")
+print(f"Sent lifetime {with_lifetime} paid {with_plan} free {with_free}")
@ -2,7 +2,6 @@
 import argparse
 import time
 
-import arrow
 from sqlalchemy import func
 
 from app.account_linking import send_user_plan_changed_event
@ -38,9 +37,9 @@ for batch_start in range(pu_id_start, max_pu_id, step):
         )
     ).all()
     for partner_user in partner_users:
-        subscription_end = send_user_plan_changed_event(partner_user)
-        if subscription_end is not None:
-            if subscription_end > arrow.get("2038-01-01").timestamp:
+        event = send_user_plan_changed_event(partner_user)
+        if event is not None:
+            if event.lifetime:
                 with_lifetime += 1
             else:
                 with_premium += 1
@ -4,6 +4,7 @@ package simplelogin_events;
 
 message UserPlanChanged {
   uint32 plan_end_time = 1;
+  bool lifetime = 2;
 }
 
 message UserDeleted {
@ -33,6 +34,9 @@ message AliasCreatedList {
   repeated AliasCreated events = 1;
 }
 
+message UserUnlinked {
+}
+
 message EventContent {
   oneof content {
     UserPlanChanged user_plan_change = 1;
@ -41,6 +45,7 @@ message EventContent {
     AliasStatusChanged alias_status_change = 4;
     AliasDeleted alias_deleted = 5;
     AliasCreatedList alias_create_list = 6;
+    UserUnlinked user_unlinked = 7;
   }
 }
 
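The lifetime field and the UserUnlinked message added above are what the backfill scripts and webhook jobs elsewhere in this changeset put on the wire. A short sketch of building those events with the generated module the diff already imports (app.events.generated.event_pb2); the .proto must be regenerated first so the new fields exist, and the timestamp below is purely illustrative:

from app.events.generated.event_pb2 import EventContent, UserPlanChanged, UserUnlinked

# lifetime users: no end date, just the new boolean flag
lifetime_event = EventContent(user_plan_change=UserPlanChanged(lifetime=True))

# paid users keep using plan_end_time (a uint32 unix timestamp)
paid_event = EventContent(user_plan_change=UserPlanChanged(plan_end_time=1767225600))

# the new empty message for the user_unlinked case
unlink_event = EventContent(user_unlinked=UserUnlinked())

print(lifetime_event.user_plan_change.lifetime)   # True
print(paid_event.user_plan_change.plan_end_time)  # 1767225600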
@@ -1,33 +1,120 @@
+[project]
+name = "SimpleLogin"
+version = "0.1.0"
+description = "SimpleLogin partner API"
+authors = [ {name="SimpleLogin", email="dev@simplelogin.io"}]
+license = "MIT"
+repository = "https://github.com/simple-login/app"
+keywords = ["email", "alias", "privacy", "oauth2", "openid"]
+packages = [
+    { include = "app/" },
+    { include = "migrations/" },
+]
+include = ["templates/*", "templates/**/*", "local_data/*.txt"]
+
+requires-python = "~=3.12"
+
+dependencies = [
+    "flask ~= 1.1.2",
+    "flask_login ~= 0.5.0",
+    "wtforms ~= 2.3.3",
+    "unidecode ~= 1.1.1",
+    "gunicorn ~= 20.0.4",
+    "bcrypt ~= 3.2.0",
+    "python-dotenv ~= 0.14.0",
+    "ipython ~= 7.31.1",
+    "sqlalchemy_utils ~= 0.36.8",
+    "psycopg2-binary ~= 2.9.10",
+    "sentry_sdk ~= 2.20.0",
+    "blinker ~= 1.9.0",
+    "arrow ~= 0.16.0",
+    "Flask-WTF ~= 0.14.3",
+    "boto3 ~= 1.35.37",
+    "Flask-Migrate ~= 2.5.3",
+    "flask_admin ~= 1.5.6",
+    "flask-cors ~= 3.0.9",
+    "watchtower ~= 0.8.0",
+    "sqlalchemy-utils == 0.36.8",
+    "jwcrypto ~= 0.8",
+    "yacron~=0.19.0",
+    "flask-debugtoolbar ~= 0.11.0",
+    "requests_oauthlib ~= 1.3.0",
+    "pyopenssl ~= 19.1.0",
+    "aiosmtpd ~= 1.2",
+    "dnspython ~= 2.7.0",
+    "coloredlogs ~= 14.0",
+    "pycryptodome ~= 3.9.8",
+    "phpserialize ~= 1.3",
+    "dkimpy == 1.0.5",
+    "pyotp ~= 2.4.0",
+    "flask_profiler ~= 1.8.1",
+    "facebook-sdk ~= 3.1.0",
+    "google-api-python-client ~= 1.12.3",
+    "google-auth-httplib2 ~= 0.0.4",
+    "python-gnupg ~= 0.4.6",
+    "webauthn ~= 0.4.7",
+
+    # Git dependency until pyspf creates a new release
+    #"pyspf @ git+https://github.com/sdgathman/pyspf.git@665a6df079485a9824be0829e7d71088453db7f6",
+    "pyspf ~= 2.0.14",
+
+    "Flask-Limiter == 1.5",
+    "memory_profiler ~= 0.57.0",
+    "gevent ~= 24.11.1",
+    "email-validator ~= 2.2.0",
+    "PGPy == 0.5.4",
+    "coinbase-commerce ~= 1.0.1",
+    "requests ~= 2.25.1",
+    "newrelic ~= 8.8.0",
+    "flanker ~= 0.9.11",
+    "pyre2 ~= 0.3.6",
+    "tldextract ~= 3.1.2",
+    "flask-debugtoolbar-sqlalchemy ~= 0.2.0",
+    "twilio ~= 7.3.2",
+    "Deprecated ~= 1.2.13",
+    "MarkupSafe~=1.1.1",
+    "cryptography ~= 37.0.1",
+    "SQLAlchemy ~= 1.3.24",
+    "redis==5.2.1",
+    "newrelic-telemetry-sdk ~= 0.5.0",
+    "aiospamc == 0.10",
+    "itsdangerous ~= 1.1.0",
+    "werkzeug ~= 1.0.1",
+    "alembic ~= 1.4.3",
+    "limits ~= 4.0.1",
+    "strictyaml ~= 1.7.3",
+]
+
 [tool.black]
-target-version = ['py310']
+target-version = ['py312']
 exclude = '''
 (
   /(
     \.eggs   # exclude a few common directories in the
   | \.git    # root of the project
   | \.hg
   | \.mypy_cache
   | \.tox
   | \.venv
   | _build
   | buck-out
   | build
   | dist
   | migrations  # migrations/ is generated by alembic
   | app/events/generated
   )/
 )
 '''

 [tool.ruff]
-ignore-init-module-imports = true
 exclude = [".venv", "migrations", "app/events/generated"]
+
+[tool.ruff.lint]
+ignore-init-module-imports = true

 [tool.djlint]
 indent = 2
 profile = "jinja"
 blank_line_after_tag = "if,for,include,load,extends,block,endcall"

 # H006: Images should have a height attribute
 # H013: Images should have an alt attribute
 # H016: Missing title tag in html. | False positive on template
@@ -43,92 +130,26 @@ blank_line_after_tag = "if,for,include,load,extends,block,endcall"
 # T001: Variables should be wrapped in a single whitespace. | Messes up with comments
 ignore = "H006,H013,H016,H017,H019,H021,H025,H030,H031,T003,J004,J018,T001"

-[tool.poetry]
-name = "SimpleLogin"
-version = "0.1.0"
-description = "open-source email alias solution"
-authors = ["SimpleLogin <dev@simplelogin.io>"]
-license = "MIT"
-repository = "https://github.com/simple-login/app"
-keywords = ["email", "alias", "privacy", "oauth2", "openid"]
-packages = [
-    { include = "app/" },
-    { include = "migrations/" },
-]
-include = ["templates/*", "templates/**/*", "local_data/*.txt"]
-
-[tool.poetry.dependencies]
-python = "^3.10"
-flask = "^1.1.2"
-flask_login = "^0.5.0"
-wtforms = "^2.3.3"
-unidecode = "^1.1.1"
-gunicorn = "^20.0.4"
-bcrypt = "^3.2.0"
-python-dotenv = "^0.14.0"
-ipython = "^7.31.1"
-sqlalchemy_utils = "^0.36.8"
-psycopg2-binary = "^2.9.3"
-sentry_sdk = "^2.16.0"
-blinker = "^1.4"
-arrow = "^0.16.0"
-Flask-WTF = "^0.14.3"
-boto3 = "^1.15.9"
-Flask-Migrate = "^2.5.3"
-flask_admin = "^1.5.6"
-flask-cors = "^3.0.9"
-watchtower = "^0.8.0"
-sqlalchemy-utils = "^0.36.8"
-jwcrypto = "^0.8"
-yacron = "^0.11.1"
-flask-debugtoolbar = "^0.11.0"
-requests_oauthlib = "^1.3.0"
-pyopenssl = "^19.1.0"
-aiosmtpd = "^1.2"
-dnspython = "^2.0.0"
-coloredlogs = "^14.0"
-pycryptodome = "^3.9.8"
-phpserialize = "^1.3"
-dkimpy = "^1.0.5"
-pyotp = "^2.4.0"
-flask_profiler = "^1.8.1"
-facebook-sdk = "^3.1.0"
-google-api-python-client = "^1.12.3"
-google-auth-httplib2 = "^0.0.4"
-python-gnupg = "^0.4.6"
-webauthn = "^0.4.7"
-pyspf = "^2.0.14"
-Flask-Limiter = "^1.4"
-memory_profiler = "^0.57.0"
-gevent = "22.10.2"
-email_validator = "^1.1.1"
-PGPy = "0.5.4"
-coinbase-commerce = "^1.0.1"
-requests = "^2.25.1"
-newrelic = "8.8.0"
-flanker = "^0.9.11"
-pyre2 = "^0.3.6"
-tldextract = "^3.1.2"
-flask-debugtoolbar-sqlalchemy = "^0.2.0"
-twilio = "^7.3.2"
-Deprecated = "^1.2.13"
-cryptography = "37.0.1"
-SQLAlchemy = "1.3.24"
-redis = "^4.5.3"
-newrelic-telemetry-sdk = "^0.5.0"
-aiospamc = "0.10"
-
-[tool.poetry.dev-dependencies]
-pytest = "^7.0.0"
-pytest-cov = "^3.0.0"
-black = "^22.1.0"
-djlint = "^1.3.0"
-pylint = "^2.14.4"
-
-[tool.poetry.group.dev.dependencies]
-ruff = "^0.1.5"
-pre-commit = "^3.8.0"
+[tool.uv]
+dev-dependencies = [
+    "pytest ~= 7.0.0",
+    "pytest-cov ~= 3.0.0",
+    "pre-commit ~= 4.1.0",
+    "black ~= 22.1.0",
+    "djlint==1.34.1",
+    "pylint ~= 2.14.4",
+    "ruff ~= 0.1.5",
+]

 [build-system]
-requires = ["poetry>=0.12"]
-build-backend = "poetry.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.metadata]
+allow-direct-references = true
+
+[tool.hatch.build.targets.sdist]
+include = ["app", "local_data", "migrations", "templates"]
+
+[tool.hatch.build.targets.wheel]
+packages = ["app", "local_data", "migrations", "templates"]
app/requirements-dev.lock (new file, 469 lines)
@@ -0,0 +1,469 @@
|
# generated by rye
|
||||||
|
# use `rye lock` or `rye sync` to update this lockfile
|
||||||
|
#
|
||||||
|
# last locked with the following flags:
|
||||||
|
# pre: false
|
||||||
|
# features: []
|
||||||
|
# all-features: false
|
||||||
|
# with-sources: false
|
||||||
|
# generate-hashes: false
|
||||||
|
# universal: false
|
||||||
|
|
||||||
|
-e file:.
|
||||||
|
aiohappyeyeballs==2.4.4
|
||||||
|
# via aiohttp
|
||||||
|
aiohttp==3.11.11
|
||||||
|
# via yacron
|
||||||
|
aiosignal==1.3.2
|
||||||
|
# via aiohttp
|
||||||
|
aiosmtpd==1.4.6
|
||||||
|
# via simplelogin
|
||||||
|
aiosmtplib==3.0.2
|
||||||
|
# via yacron
|
||||||
|
aiospamc==0.10.0
|
||||||
|
# via simplelogin
|
||||||
|
alembic==1.14.0
|
||||||
|
# via flask-migrate
|
||||||
|
appnope==0.1.4
|
||||||
|
# via ipython
|
||||||
|
arrow==0.16.0
|
||||||
|
# via simplelogin
|
||||||
|
astroid==2.11.7
|
||||||
|
# via pylint
|
||||||
|
async-timeout==5.0.1
|
||||||
|
# via aiohttp
|
||||||
|
# via redis
|
||||||
|
atpublic==5.0
|
||||||
|
# via aiosmtpd
|
||||||
|
attrs==24.3.0
|
||||||
|
# via aiohttp
|
||||||
|
# via aiosmtpd
|
||||||
|
# via flanker
|
||||||
|
# via pytest
|
||||||
|
backcall==0.2.0
|
||||||
|
# via ipython
|
||||||
|
bcrypt==3.2.2
|
||||||
|
# via simplelogin
|
||||||
|
black==22.1.0
|
||||||
|
blinker==1.9.0
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via simplelogin
|
||||||
|
boto3==1.35.99
|
||||||
|
# via simplelogin
|
||||||
|
# via watchtower
|
||||||
|
botocore==1.35.99
|
||||||
|
# via boto3
|
||||||
|
# via s3transfer
|
||||||
|
cachetools==5.5.0
|
||||||
|
# via google-auth
|
||||||
|
cbor2==5.6.5
|
||||||
|
# via webauthn
|
||||||
|
certifi==2024.12.14
|
||||||
|
# via aiospamc
|
||||||
|
# via requests
|
||||||
|
# via sentry-sdk
|
||||||
|
cffi==1.17.1
|
||||||
|
# via bcrypt
|
||||||
|
# via cryptography
|
||||||
|
cfgv==3.4.0
|
||||||
|
# via pre-commit
|
||||||
|
chardet==4.0.0
|
||||||
|
# via flanker
|
||||||
|
# via requests
|
||||||
|
click==8.1.8
|
||||||
|
# via black
|
||||||
|
# via djlint
|
||||||
|
# via flask
|
||||||
|
# via typer
|
||||||
|
coinbase-commerce==1.0.1
|
||||||
|
# via simplelogin
|
||||||
|
colorama==0.4.6
|
||||||
|
# via djlint
|
||||||
|
coloredlogs==14.3
|
||||||
|
# via simplelogin
|
||||||
|
coverage==7.6.10
|
||||||
|
# via pytest-cov
|
||||||
|
crontab==0.22.8
|
||||||
|
# via yacron
|
||||||
|
cryptography==37.0.4
|
||||||
|
# via flanker
|
||||||
|
# via jwcrypto
|
||||||
|
# via pgpy
|
||||||
|
# via pyopenssl
|
||||||
|
# via simplelogin
|
||||||
|
# via webauthn
|
||||||
|
decorator==5.1.1
|
||||||
|
# via ipython
|
||||||
|
deprecated==1.2.15
|
||||||
|
# via jwcrypto
|
||||||
|
# via limits
|
||||||
|
# via simplelogin
|
||||||
|
dill==0.3.9
|
||||||
|
# via pylint
|
||||||
|
distlib==0.3.9
|
||||||
|
# via virtualenv
|
||||||
|
djlint==1.3.0
|
||||||
|
dkimpy==1.0.6
|
||||||
|
# via simplelogin
|
||||||
|
dnspython==2.6.1
|
||||||
|
# via dkimpy
|
||||||
|
# via email-validator
|
||||||
|
# via simplelogin
|
||||||
|
email-validator==1.1.3
|
||||||
|
# via simplelogin
|
||||||
|
facebook-sdk==3.1.0
|
||||||
|
# via simplelogin
|
||||||
|
filelock==3.16.1
|
||||||
|
# via tldextract
|
||||||
|
# via virtualenv
|
||||||
|
flanker==0.9.11
|
||||||
|
# via simplelogin
|
||||||
|
flask==1.1.2
|
||||||
|
# via flask-admin
|
||||||
|
# via flask-cors
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via flask-httpauth
|
||||||
|
# via flask-limiter
|
||||||
|
# via flask-login
|
||||||
|
# via flask-migrate
|
||||||
|
# via flask-profiler
|
||||||
|
# via flask-sqlalchemy
|
||||||
|
# via flask-wtf
|
||||||
|
# via simplelogin
|
||||||
|
flask-admin==1.5.8
|
||||||
|
# via simplelogin
|
||||||
|
flask-cors==3.0.10
|
||||||
|
# via simplelogin
|
||||||
|
flask-debugtoolbar==0.11.0
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
# via simplelogin
|
||||||
|
flask-debugtoolbar-sqlalchemy==0.2.0
|
||||||
|
# via simplelogin
|
||||||
|
flask-httpauth==4.8.0
|
||||||
|
# via flask-profiler
|
||||||
|
flask-limiter==1.4
|
||||||
|
# via simplelogin
|
||||||
|
flask-login==0.5.0
|
||||||
|
# via simplelogin
|
||||||
|
flask-migrate==2.5.3
|
||||||
|
# via simplelogin
|
||||||
|
flask-profiler==1.8.1
|
||||||
|
# via simplelogin
|
||||||
|
flask-sqlalchemy==2.5.1
|
||||||
|
# via flask-migrate
|
||||||
|
flask-wtf==0.14.3
|
||||||
|
# via simplelogin
|
||||||
|
frozenlist==1.5.0
|
||||||
|
# via aiohttp
|
||||||
|
# via aiosignal
|
||||||
|
future==1.0.0
|
||||||
|
# via webauthn
|
||||||
|
gevent==24.11.1
|
||||||
|
# via simplelogin
|
||||||
|
google-api-core==2.24.0
|
||||||
|
# via google-api-python-client
|
||||||
|
google-api-python-client==1.12.11
|
||||||
|
# via simplelogin
|
||||||
|
google-auth==2.37.0
|
||||||
|
# via google-api-core
|
||||||
|
# via google-api-python-client
|
||||||
|
# via google-auth-httplib2
|
||||||
|
google-auth-httplib2==0.0.4
|
||||||
|
# via google-api-python-client
|
||||||
|
# via simplelogin
|
||||||
|
googleapis-common-protos==1.66.0
|
||||||
|
# via google-api-core
|
||||||
|
greenlet==3.1.1
|
||||||
|
# via gevent
|
||||||
|
gunicorn==20.0.4
|
||||||
|
# via simplelogin
|
||||||
|
html-tag-names==0.1.2
|
||||||
|
# via djlint
|
||||||
|
html-void-elements==0.1.0
|
||||||
|
# via djlint
|
||||||
|
httplib2==0.22.0
|
||||||
|
# via google-api-python-client
|
||||||
|
# via google-auth-httplib2
|
||||||
|
humanfriendly==10.0
|
||||||
|
# via coloredlogs
|
||||||
|
identify==2.6.5
|
||||||
|
# via pre-commit
|
||||||
|
idna==2.10
|
||||||
|
# via email-validator
|
||||||
|
# via flanker
|
||||||
|
# via requests
|
||||||
|
# via tldextract
|
||||||
|
# via yarl
|
||||||
|
importlib-metadata==4.13.0
|
||||||
|
# via djlint
|
||||||
|
iniconfig==2.0.0
|
||||||
|
# via pytest
|
||||||
|
ipython==7.31.1
|
||||||
|
# via simplelogin
|
||||||
|
isort==5.13.2
|
||||||
|
# via pylint
|
||||||
|
itsdangerous==1.1.0
|
||||||
|
# via flask
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via flask-wtf
|
||||||
|
# via simplelogin
|
||||||
|
jedi==0.19.2
|
||||||
|
# via ipython
|
||||||
|
jinja2==2.11.3
|
||||||
|
# via flask
|
||||||
|
# via yacron
|
||||||
|
jmespath==1.0.1
|
||||||
|
# via boto3
|
||||||
|
# via botocore
|
||||||
|
jwcrypto==0.9.1
|
||||||
|
# via simplelogin
|
||||||
|
lazy-object-proxy==1.10.0
|
||||||
|
# via astroid
|
||||||
|
limits==4.0.0
|
||||||
|
# via flask-limiter
|
||||||
|
loguru==0.7.3
|
||||||
|
# via aiospamc
|
||||||
|
mako==1.3.8
|
||||||
|
# via alembic
|
||||||
|
markupsafe==1.1.1
|
||||||
|
# via jinja2
|
||||||
|
# via mako
|
||||||
|
# via simplelogin
|
||||||
|
# via wtforms
|
||||||
|
matplotlib-inline==0.1.7
|
||||||
|
# via ipython
|
||||||
|
mccabe==0.7.0
|
||||||
|
# via pylint
|
||||||
|
memory-profiler==0.57.0
|
||||||
|
# via simplelogin
|
||||||
|
multidict==6.1.0
|
||||||
|
# via aiohttp
|
||||||
|
# via yarl
|
||||||
|
mypy-extensions==1.0.0
|
||||||
|
# via black
|
||||||
|
newrelic==8.8.1
|
||||||
|
# via simplelogin
|
||||||
|
newrelic-telemetry-sdk==0.5.1
|
||||||
|
# via simplelogin
|
||||||
|
nodeenv==1.9.1
|
||||||
|
# via pre-commit
|
||||||
|
oauthlib==3.2.2
|
||||||
|
# via requests-oauthlib
|
||||||
|
packaging==24.2
|
||||||
|
# via limits
|
||||||
|
# via pytest
|
||||||
|
parso==0.8.4
|
||||||
|
# via jedi
|
||||||
|
pathspec==0.9.0
|
||||||
|
# via black
|
||||||
|
# via djlint
|
||||||
|
pexpect==4.9.0
|
||||||
|
# via ipython
|
||||||
|
pgpy==0.5.4
|
||||||
|
# via simplelogin
|
||||||
|
phpserialize==1.3
|
||||||
|
# via simplelogin
|
||||||
|
pickleshare==0.7.5
|
||||||
|
# via ipython
|
||||||
|
platformdirs==4.3.6
|
||||||
|
# via black
|
||||||
|
# via pylint
|
||||||
|
# via virtualenv
|
||||||
|
pluggy==1.5.0
|
||||||
|
# via pytest
|
||||||
|
ply==3.11
|
||||||
|
# via flanker
|
||||||
|
pre-commit==2.17.0
|
||||||
|
prompt-toolkit==3.0.48
|
||||||
|
# via ipython
|
||||||
|
propcache==0.2.1
|
||||||
|
# via aiohttp
|
||||||
|
# via yarl
|
||||||
|
proto-plus==1.25.0
|
||||||
|
# via google-api-core
|
||||||
|
protobuf==5.29.3
|
||||||
|
# via google-api-core
|
||||||
|
# via googleapis-common-protos
|
||||||
|
# via proto-plus
|
||||||
|
psutil==6.1.1
|
||||||
|
# via memory-profiler
|
||||||
|
psycopg2-binary==2.9.10
|
||||||
|
# via simplelogin
|
||||||
|
ptyprocess==0.7.0
|
||||||
|
# via pexpect
|
||||||
|
py==1.11.0
|
||||||
|
# via pytest
|
||||||
|
pyasn1==0.6.1
|
||||||
|
# via pgpy
|
||||||
|
# via pyasn1-modules
|
||||||
|
# via rsa
|
||||||
|
pyasn1-modules==0.4.1
|
||||||
|
# via google-auth
|
||||||
|
pycparser==2.22
|
||||||
|
# via cffi
|
||||||
|
pycryptodome==3.9.9
|
||||||
|
# via simplelogin
|
||||||
|
pygments==2.19.1
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
# via ipython
|
||||||
|
pyjwt==2.10.1
|
||||||
|
# via twilio
|
||||||
|
pylint==2.14.5
|
||||||
|
pyopenssl==19.1.0
|
||||||
|
# via simplelogin
|
||||||
|
# via webauthn
|
||||||
|
pyotp==2.4.1
|
||||||
|
# via simplelogin
|
||||||
|
pyparsing==3.2.1
|
||||||
|
# via httplib2
|
||||||
|
pyre2==0.3.6
|
||||||
|
# via simplelogin
|
||||||
|
pyspf==2.0.14
|
||||||
|
# via simplelogin
|
||||||
|
pytest==7.0.1
|
||||||
|
# via pytest-cov
|
||||||
|
pytest-cov==3.0.0
|
||||||
|
python-dateutil==2.9.0.post0
|
||||||
|
# via arrow
|
||||||
|
# via botocore
|
||||||
|
# via strictyaml
|
||||||
|
python-dotenv==0.14.0
|
||||||
|
# via simplelogin
|
||||||
|
python-gnupg==0.4.9
|
||||||
|
# via simplelogin
|
||||||
|
pytz==2024.2
|
||||||
|
# via twilio
|
||||||
|
# via yacron
|
||||||
|
pyyaml==6.0.2
|
||||||
|
# via djlint
|
||||||
|
# via pre-commit
|
||||||
|
redis==4.5.5
|
||||||
|
# via simplelogin
|
||||||
|
regex==2022.10.31
|
||||||
|
# via djlint
|
||||||
|
# via flanker
|
||||||
|
requests==2.25.1
|
||||||
|
# via coinbase-commerce
|
||||||
|
# via facebook-sdk
|
||||||
|
# via google-api-core
|
||||||
|
# via requests-file
|
||||||
|
# via requests-oauthlib
|
||||||
|
# via simplelogin
|
||||||
|
# via tldextract
|
||||||
|
# via twilio
|
||||||
|
requests-file==2.1.0
|
||||||
|
# via tldextract
|
||||||
|
requests-oauthlib==1.3.1
|
||||||
|
# via simplelogin
|
||||||
|
rsa==4.9
|
||||||
|
# via google-auth
|
||||||
|
ruamel-yaml==0.17.4
|
||||||
|
# via yacron
|
||||||
|
ruff==0.1.15
|
||||||
|
s3transfer==0.10.4
|
||||||
|
# via boto3
|
||||||
|
sentry-sdk==2.20.0
|
||||||
|
# via simplelogin
|
||||||
|
# via yacron
|
||||||
|
setuptools==75.8.0
|
||||||
|
# via astroid
|
||||||
|
# via gunicorn
|
||||||
|
# via ipython
|
||||||
|
# via zope-event
|
||||||
|
# via zope-interface
|
||||||
|
simplejson==3.19.3
|
||||||
|
# via flask-profiler
|
||||||
|
six==1.17.0
|
||||||
|
# via coinbase-commerce
|
||||||
|
# via flanker
|
||||||
|
# via flask-cors
|
||||||
|
# via flask-limiter
|
||||||
|
# via google-api-python-client
|
||||||
|
# via google-auth-httplib2
|
||||||
|
# via jwcrypto
|
||||||
|
# via pgpy
|
||||||
|
# via pyopenssl
|
||||||
|
# via python-dateutil
|
||||||
|
# via sqlalchemy-utils
|
||||||
|
# via webauthn
|
||||||
|
sqlalchemy==1.3.24
|
||||||
|
# via alembic
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
# via flask-sqlalchemy
|
||||||
|
# via simplelogin
|
||||||
|
# via sqlalchemy-utils
|
||||||
|
sqlalchemy-utils==0.36.8
|
||||||
|
# via simplelogin
|
||||||
|
sqlparse==0.5.3
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
strictyaml==1.7.3
|
||||||
|
# via yacron
|
||||||
|
tld==0.13
|
||||||
|
# via flanker
|
||||||
|
tldextract==3.1.2
|
||||||
|
# via simplelogin
|
||||||
|
toml==0.10.2
|
||||||
|
# via pre-commit
|
||||||
|
tomli==2.2.1
|
||||||
|
# via black
|
||||||
|
# via coverage
|
||||||
|
# via djlint
|
||||||
|
# via pylint
|
||||||
|
# via pytest
|
||||||
|
tomlkit==0.13.2
|
||||||
|
# via pylint
|
||||||
|
tqdm==4.67.1
|
||||||
|
# via djlint
|
||||||
|
traitlets==5.14.3
|
||||||
|
# via ipython
|
||||||
|
# via matplotlib-inline
|
||||||
|
twilio==7.3.2
|
||||||
|
# via simplelogin
|
||||||
|
typer==0.9.4
|
||||||
|
# via aiospamc
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via aiospamc
|
||||||
|
# via alembic
|
||||||
|
# via limits
|
||||||
|
# via multidict
|
||||||
|
# via typer
|
||||||
|
unidecode==1.1.2
|
||||||
|
# via simplelogin
|
||||||
|
uritemplate==3.0.1
|
||||||
|
# via google-api-python-client
|
||||||
|
urllib3==1.26.20
|
||||||
|
# via botocore
|
||||||
|
# via newrelic-telemetry-sdk
|
||||||
|
# via requests
|
||||||
|
# via sentry-sdk
|
||||||
|
virtualenv==20.29.0
|
||||||
|
# via pre-commit
|
||||||
|
watchtower==0.8.0
|
||||||
|
# via simplelogin
|
||||||
|
wcwidth==0.2.13
|
||||||
|
# via prompt-toolkit
|
||||||
|
webauthn==0.4.7
|
||||||
|
# via simplelogin
|
||||||
|
webob==1.8.9
|
||||||
|
# via flanker
|
||||||
|
werkzeug==1.0.1
|
||||||
|
# via flask
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via simplelogin
|
||||||
|
wrapt==1.17.2
|
||||||
|
# via astroid
|
||||||
|
# via deprecated
|
||||||
|
wtforms==2.3.3
|
||||||
|
# via flask-admin
|
||||||
|
# via flask-wtf
|
||||||
|
# via simplelogin
|
||||||
|
yacron==0.19.0
|
||||||
|
# via simplelogin
|
||||||
|
yarl==1.18.3
|
||||||
|
# via aiohttp
|
||||||
|
zipp==3.21.0
|
||||||
|
# via importlib-metadata
|
||||||
|
zope-event==5.0
|
||||||
|
# via gevent
|
||||||
|
zope-interface==7.2
|
||||||
|
# via gevent
|
app/requirements.lock (new file, 392 lines)
@@ -0,0 +1,392 @@
|
# generated by rye
|
||||||
|
# use `rye lock` or `rye sync` to update this lockfile
|
||||||
|
#
|
||||||
|
# last locked with the following flags:
|
||||||
|
# pre: false
|
||||||
|
# features: []
|
||||||
|
# all-features: false
|
||||||
|
# with-sources: false
|
||||||
|
# generate-hashes: false
|
||||||
|
# universal: false
|
||||||
|
|
||||||
|
-e file:.
|
||||||
|
aiohttp==3.8.4
|
||||||
|
# via google-auth
|
||||||
|
# via yacron
|
||||||
|
aiosignal==1.2.0
|
||||||
|
# via aiohttp
|
||||||
|
aiosmtpd==1.4.2
|
||||||
|
# via simplelogin
|
||||||
|
aiosmtplib==1.1.4
|
||||||
|
# via yacron
|
||||||
|
aiospamc==0.10.0
|
||||||
|
# via simplelogin
|
||||||
|
alembic==1.4.3
|
||||||
|
# via flask-migrate
|
||||||
|
appnope==0.1.0
|
||||||
|
# via ipython
|
||||||
|
arrow==0.16.0
|
||||||
|
# via simplelogin
|
||||||
|
async-timeout==4.0.2
|
||||||
|
# via aiohttp
|
||||||
|
# via redis
|
||||||
|
atpublic==2.0
|
||||||
|
# via aiosmtpd
|
||||||
|
attrs==20.2.0
|
||||||
|
# via aiohttp
|
||||||
|
# via aiosmtpd
|
||||||
|
# via flanker
|
||||||
|
backcall==0.2.0
|
||||||
|
# via ipython
|
||||||
|
bcrypt==3.2.0
|
||||||
|
# via simplelogin
|
||||||
|
blinker==1.4
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via simplelogin
|
||||||
|
boto3==1.35.99
|
||||||
|
# via simplelogin
|
||||||
|
# via watchtower
|
||||||
|
botocore==1.35.99
|
||||||
|
# via boto3
|
||||||
|
# via s3transfer
|
||||||
|
cachetools==4.1.1
|
||||||
|
# via google-auth
|
||||||
|
cbor2==5.2.0
|
||||||
|
# via webauthn
|
||||||
|
certifi==2019.11.28
|
||||||
|
# via aiospamc
|
||||||
|
# via requests
|
||||||
|
# via sentry-sdk
|
||||||
|
cffi==1.14.4
|
||||||
|
# via bcrypt
|
||||||
|
# via cryptography
|
||||||
|
chardet==3.0.4
|
||||||
|
# via flanker
|
||||||
|
# via requests
|
||||||
|
charset-normalizer==3.4.1
|
||||||
|
# via aiohttp
|
||||||
|
click==8.0.3
|
||||||
|
# via flask
|
||||||
|
# via typer
|
||||||
|
coinbase-commerce==1.0.1
|
||||||
|
# via simplelogin
|
||||||
|
coloredlogs==14.0
|
||||||
|
# via simplelogin
|
||||||
|
crontab==0.22.8
|
||||||
|
# via yacron
|
||||||
|
cryptography==37.0.1
|
||||||
|
# via flanker
|
||||||
|
# via jwcrypto
|
||||||
|
# via pgpy
|
||||||
|
# via pyopenssl
|
||||||
|
# via simplelogin
|
||||||
|
# via webauthn
|
||||||
|
decorator==4.4.2
|
||||||
|
# via ipython
|
||||||
|
deprecated==1.2.13
|
||||||
|
# via simplelogin
|
||||||
|
dkimpy==1.0.5
|
||||||
|
# via simplelogin
|
||||||
|
dnspython==2.6.1
|
||||||
|
# via dkimpy
|
||||||
|
# via email-validator
|
||||||
|
# via simplelogin
|
||||||
|
email-validator==1.1.3
|
||||||
|
# via simplelogin
|
||||||
|
facebook-sdk==3.1.0
|
||||||
|
# via simplelogin
|
||||||
|
filelock==3.15.4
|
||||||
|
# via tldextract
|
||||||
|
flanker==0.9.11
|
||||||
|
# via simplelogin
|
||||||
|
flask==1.1.2
|
||||||
|
# via flask-admin
|
||||||
|
# via flask-cors
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via flask-httpauth
|
||||||
|
# via flask-limiter
|
||||||
|
# via flask-login
|
||||||
|
# via flask-migrate
|
||||||
|
# via flask-profiler
|
||||||
|
# via flask-sqlalchemy
|
||||||
|
# via flask-wtf
|
||||||
|
# via simplelogin
|
||||||
|
flask-admin==1.5.7
|
||||||
|
# via simplelogin
|
||||||
|
flask-cors==3.0.9
|
||||||
|
# via simplelogin
|
||||||
|
flask-debugtoolbar==0.11.0
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
# via simplelogin
|
||||||
|
flask-debugtoolbar-sqlalchemy==0.2.0
|
||||||
|
# via simplelogin
|
||||||
|
flask-httpauth==4.1.0
|
||||||
|
# via flask-profiler
|
||||||
|
flask-limiter==1.4
|
||||||
|
# via simplelogin
|
||||||
|
flask-login==0.5.0
|
||||||
|
# via simplelogin
|
||||||
|
flask-migrate==2.5.3
|
||||||
|
# via simplelogin
|
||||||
|
flask-profiler==1.8.1
|
||||||
|
# via simplelogin
|
||||||
|
flask-sqlalchemy==2.5.1
|
||||||
|
# via flask-migrate
|
||||||
|
flask-wtf==0.14.3
|
||||||
|
# via simplelogin
|
||||||
|
frozenlist==1.3.3
|
||||||
|
# via aiohttp
|
||||||
|
# via aiosignal
|
||||||
|
future==0.18.3
|
||||||
|
# via webauthn
|
||||||
|
gevent==24.11.1
|
||||||
|
# via simplelogin
|
||||||
|
google-api-core==1.22.2
|
||||||
|
# via google-api-python-client
|
||||||
|
google-api-python-client==1.12.3
|
||||||
|
# via simplelogin
|
||||||
|
google-auth==1.22.0
|
||||||
|
# via google-api-core
|
||||||
|
# via google-api-python-client
|
||||||
|
# via google-auth-httplib2
|
||||||
|
google-auth-httplib2==0.0.4
|
||||||
|
# via google-api-python-client
|
||||||
|
# via simplelogin
|
||||||
|
googleapis-common-protos==1.52.0
|
||||||
|
# via google-api-core
|
||||||
|
greenlet==3.1.1
|
||||||
|
# via gevent
|
||||||
|
gunicorn==20.0.4
|
||||||
|
# via simplelogin
|
||||||
|
httplib2==0.22.0
|
||||||
|
# via google-api-python-client
|
||||||
|
# via google-auth-httplib2
|
||||||
|
humanfriendly==8.2
|
||||||
|
# via coloredlogs
|
||||||
|
idna==2.10
|
||||||
|
# via email-validator
|
||||||
|
# via flanker
|
||||||
|
# via requests
|
||||||
|
# via tldextract
|
||||||
|
# via yarl
|
||||||
|
ipython==7.31.1
|
||||||
|
# via simplelogin
|
||||||
|
ipython-genutils==0.2.0
|
||||||
|
# via traitlets
|
||||||
|
itsdangerous==1.1.0
|
||||||
|
# via flask
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via flask-wtf
|
||||||
|
# via simplelogin
|
||||||
|
jedi==0.17.2
|
||||||
|
# via ipython
|
||||||
|
jinja2==2.11.3
|
||||||
|
# via flask
|
||||||
|
# via yacron
|
||||||
|
jmespath==0.10.0
|
||||||
|
# via boto3
|
||||||
|
# via botocore
|
||||||
|
jwcrypto==0.8
|
||||||
|
# via simplelogin
|
||||||
|
limits==1.5.1
|
||||||
|
# via flask-limiter
|
||||||
|
loguru==0.7.2
|
||||||
|
# via aiospamc
|
||||||
|
mako==1.2.4
|
||||||
|
# via alembic
|
||||||
|
markupsafe==1.1.1
|
||||||
|
# via jinja2
|
||||||
|
# via mako
|
||||||
|
# via simplelogin
|
||||||
|
# via wtforms
|
||||||
|
matplotlib-inline==0.1.3
|
||||||
|
# via ipython
|
||||||
|
memory-profiler==0.57.0
|
||||||
|
# via simplelogin
|
||||||
|
multidict==4.7.6
|
||||||
|
# via aiohttp
|
||||||
|
# via yarl
|
||||||
|
newrelic==8.8.0
|
||||||
|
# via simplelogin
|
||||||
|
newrelic-telemetry-sdk==0.5.0
|
||||||
|
# via simplelogin
|
||||||
|
oauthlib==3.1.0
|
||||||
|
# via requests-oauthlib
|
||||||
|
parso==0.7.1
|
||||||
|
# via jedi
|
||||||
|
pexpect==4.8.0
|
||||||
|
# via ipython
|
||||||
|
pgpy==0.5.4
|
||||||
|
# via simplelogin
|
||||||
|
phpserialize==1.3
|
||||||
|
# via simplelogin
|
||||||
|
pickleshare==0.7.5
|
||||||
|
# via ipython
|
||||||
|
ply==3.11
|
||||||
|
# via flanker
|
||||||
|
prompt-toolkit==3.0.7
|
||||||
|
# via ipython
|
||||||
|
protobuf==5.27.1
|
||||||
|
# via google-api-core
|
||||||
|
# via googleapis-common-protos
|
||||||
|
psutil==5.7.2
|
||||||
|
# via memory-profiler
|
||||||
|
psycopg2-binary==2.9.3
|
||||||
|
# via simplelogin
|
||||||
|
ptyprocess==0.6.0
|
||||||
|
# via pexpect
|
||||||
|
pyasn1==0.4.8
|
||||||
|
# via pgpy
|
||||||
|
# via pyasn1-modules
|
||||||
|
# via rsa
|
||||||
|
pyasn1-modules==0.2.8
|
||||||
|
# via google-auth
|
||||||
|
pycparser==2.20
|
||||||
|
# via cffi
|
||||||
|
pycryptodome==3.9.8
|
||||||
|
# via simplelogin
|
||||||
|
pygments==2.7.4
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
# via ipython
|
||||||
|
pyjwt==2.4.0
|
||||||
|
# via twilio
|
||||||
|
pyopenssl==19.1.0
|
||||||
|
# via simplelogin
|
||||||
|
# via webauthn
|
||||||
|
pyotp==2.4.0
|
||||||
|
# via simplelogin
|
||||||
|
pyparsing==2.4.7
|
||||||
|
# via httplib2
|
||||||
|
pyre2==0.3.6
|
||||||
|
# via simplelogin
|
||||||
|
pyspf==2.0.14
|
||||||
|
# via simplelogin
|
||||||
|
python-dateutil==2.8.1
|
||||||
|
# via alembic
|
||||||
|
# via arrow
|
||||||
|
# via botocore
|
||||||
|
# via strictyaml
|
||||||
|
python-dotenv==0.14.0
|
||||||
|
# via simplelogin
|
||||||
|
python-editor==1.0.4
|
||||||
|
# via alembic
|
||||||
|
python-gnupg==0.4.6
|
||||||
|
# via simplelogin
|
||||||
|
pytz==2020.1
|
||||||
|
# via google-api-core
|
||||||
|
# via twilio
|
||||||
|
# via yacron
|
||||||
|
redis==4.5.5
|
||||||
|
# via simplelogin
|
||||||
|
regex==2023.12.25
|
||||||
|
# via flanker
|
||||||
|
requests==2.25.1
|
||||||
|
# via coinbase-commerce
|
||||||
|
# via facebook-sdk
|
||||||
|
# via google-api-core
|
||||||
|
# via requests-file
|
||||||
|
# via requests-oauthlib
|
||||||
|
# via simplelogin
|
||||||
|
# via tldextract
|
||||||
|
# via twilio
|
||||||
|
requests-file==1.5.1
|
||||||
|
# via tldextract
|
||||||
|
requests-oauthlib==1.3.0
|
||||||
|
# via simplelogin
|
||||||
|
rsa==4.6
|
||||||
|
# via google-auth
|
||||||
|
ruamel-yaml==0.17.4
|
||||||
|
# via strictyaml
|
||||||
|
# via yacron
|
||||||
|
s3transfer==0.10.4
|
||||||
|
# via boto3
|
||||||
|
sentry-sdk==2.20.0
|
||||||
|
# via simplelogin
|
||||||
|
# via yacron
|
||||||
|
setuptools==67.6.0
|
||||||
|
# via google-api-core
|
||||||
|
# via google-auth
|
||||||
|
# via gunicorn
|
||||||
|
# via ipython
|
||||||
|
# via zope-event
|
||||||
|
# via zope-interface
|
||||||
|
simplejson==3.17.2
|
||||||
|
# via flask-profiler
|
||||||
|
six==1.15.0
|
||||||
|
# via bcrypt
|
||||||
|
# via coinbase-commerce
|
||||||
|
# via flanker
|
||||||
|
# via flask-cors
|
||||||
|
# via flask-limiter
|
||||||
|
# via google-api-core
|
||||||
|
# via google-api-python-client
|
||||||
|
# via google-auth
|
||||||
|
# via google-auth-httplib2
|
||||||
|
# via limits
|
||||||
|
# via pgpy
|
||||||
|
# via pyopenssl
|
||||||
|
# via python-dateutil
|
||||||
|
# via requests-file
|
||||||
|
# via sqlalchemy-utils
|
||||||
|
# via webauthn
|
||||||
|
sqlalchemy==1.3.24
|
||||||
|
# via alembic
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
# via flask-sqlalchemy
|
||||||
|
# via simplelogin
|
||||||
|
# via sqlalchemy-utils
|
||||||
|
sqlalchemy-utils==0.36.8
|
||||||
|
# via simplelogin
|
||||||
|
sqlparse==0.4.4
|
||||||
|
# via flask-debugtoolbar-sqlalchemy
|
||||||
|
strictyaml==1.1.0
|
||||||
|
# via yacron
|
||||||
|
tld==0.12.6
|
||||||
|
# via flanker
|
||||||
|
tldextract==3.1.2
|
||||||
|
# via simplelogin
|
||||||
|
traitlets==5.0.4
|
||||||
|
# via ipython
|
||||||
|
# via matplotlib-inline
|
||||||
|
twilio==7.3.2
|
||||||
|
# via simplelogin
|
||||||
|
typer==0.9.0
|
||||||
|
# via aiospamc
|
||||||
|
typing-extensions==4.8.0
|
||||||
|
# via aiospamc
|
||||||
|
# via typer
|
||||||
|
unidecode==1.1.1
|
||||||
|
# via simplelogin
|
||||||
|
uritemplate==3.0.1
|
||||||
|
# via google-api-python-client
|
||||||
|
urllib3==1.26.20
|
||||||
|
# via botocore
|
||||||
|
# via newrelic-telemetry-sdk
|
||||||
|
# via requests
|
||||||
|
# via sentry-sdk
|
||||||
|
watchtower==0.8.0
|
||||||
|
# via simplelogin
|
||||||
|
wcwidth==0.2.5
|
||||||
|
# via prompt-toolkit
|
||||||
|
webauthn==0.4.7
|
||||||
|
# via simplelogin
|
||||||
|
webob==1.8.7
|
||||||
|
# via flanker
|
||||||
|
werkzeug==1.0.1
|
||||||
|
# via flask
|
||||||
|
# via flask-debugtoolbar
|
||||||
|
# via simplelogin
|
||||||
|
wrapt==1.15.0
|
||||||
|
# via deprecated
|
||||||
|
wtforms==2.3.3
|
||||||
|
# via flask-admin
|
||||||
|
# via flask-wtf
|
||||||
|
# via simplelogin
|
||||||
|
yacron==0.19.0
|
||||||
|
# via simplelogin
|
||||||
|
yarl==1.9.2
|
||||||
|
# via aiohttp
|
||||||
|
zope-event==5.0
|
||||||
|
# via gevent
|
||||||
|
zope-interface==7.2
|
||||||
|
# via gevent
|
@@ -4,12 +4,14 @@ SCRIPT_DIR="$(cd "$(dirname "$0")" || exit 1; pwd -P)"
 REPO_ROOT=$(echo "${SCRIPT_DIR}" | sed 's:scripts::g')
 BUILD_INFO_FILE="${REPO_ROOT}/app/build_info.py"

-if [[ -z "$1" ]]; then
-  echo "This script needs to be invoked with the version as an argument"
+if [[ -z "$2" ]]; then
+  echo "Invalid usage. Usage: $0 SHA VERSION"
   exit 1
 fi

-VERSION="$1"
-echo "SHA1 = \"${VERSION}\"" > $BUILD_INFO_FILE
+SHA="$1"
+echo "SHA1 = \"${SHA}\"" > $BUILD_INFO_FILE
 BUILD_TIME=$(date +%s)
 echo "BUILD_TIME = \"${BUILD_TIME}\"" >> $BUILD_INFO_FILE
+VERSION="$2"
+echo "VERSION = \"${VERSION}\"" >> $BUILD_INFO_FILE
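For reference, after this change the script writes both the commit SHA and the version, so the generated app/build_info.py would look roughly like the following (placeholder values):

# app/build_info.py as written by the script above (placeholder values)
SHA1 = "0123456789abcdef"
BUILD_TIME = "1700000000"
VERSION = "1.2.3"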
@@ -12,10 +12,10 @@ docker run -p 25432:5432 --name ${container_name} -e POSTGRES_PASSWORD=postgres
 sleep 3

 # upgrade the DB to the latest stage and
-env DB_URI=postgresql://postgres:postgres@127.0.0.1:25432/sl poetry run alembic upgrade head
+env DB_URI=postgresql://postgres:postgres@127.0.0.1:25432/sl uv run alembic upgrade head

 # generate the migration script.
-env DB_URI=postgresql://postgres:postgres@127.0.0.1:25432/sl poetry run alembic revision --autogenerate $@
+env DB_URI=postgresql://postgres:postgres@127.0.0.1:25432/sl uv run alembic revision --autogenerate $@

 # remove the db
 docker rm -f ${container_name}
@@ -3,5 +3,5 @@
 export DB_URI=postgresql://myuser:mypassword@localhost:15432/simplelogin
 echo 'drop schema public cascade; create schema public;' | psql $DB_URI

-poetry run alembic upgrade head
-poetry run flask dummy-data
+uv run alembic upgrade head
+uv run flask dummy-data
@@ -3,4 +3,4 @@
 export DB_URI=postgresql://myuser:mypassword@localhost:15432/test
 echo 'drop schema public cascade; create schema public;' | psql $DB_URI

-poetry run alembic upgrade head
+uv run alembic upgrade head
@@ -10,10 +10,10 @@ docker run -d --name sl-test-db -e POSTGRES_PASSWORD=test -e POSTGRES_USER=test
 sleep 3

 # migrate the DB to the latest version
-CONFIG=tests/test.env poetry run alembic upgrade head
+CONFIG=tests/test.env uv run alembic upgrade head

 # run test
-poetry run pytest -c pytest.ci.ini
+uv run pytest -c pytest.ci.ini

 # Delete the test DB
 docker rm -f sl-test-db
@@ -8,7 +8,6 @@ import flask_limiter
 import flask_profiler
 import newrelic.agent
 import sentry_sdk
-
 from flask import (
     Flask,
     redirect,
@@ -44,6 +43,7 @@ from app.admin_model import (
     MetricAdmin,
     InvalidMailboxDomainAdmin,
     EmailSearchAdmin,
+    CustomDomainSearchAdmin,
 )
 from app.api.base import api_bp
 from app.auth.base import auth_bp
@@ -99,6 +99,7 @@ from app.models import (
     InvalidMailboxDomain,
 )
 from app.monitor.base import monitor_bp
+from app.monitor_utils import send_version_event
 from app.newsletter_utils import send_newsletter_to_user
 from app.oauth.base import oauth_bp
 from app.onboarding.base import onboarding_bp
@@ -106,6 +107,7 @@ from app.payments.coinbase import setup_coinbase_commerce
 from app.payments.paddle import setup_paddle_callback
 from app.phone.base import phone_bp
 from app.redis_services import initialize_redis_services
+from app.request_utils import generate_request_id
 from app.sentry_utils import sentry_before_send

 if SENTRY_DSN:
@@ -263,6 +265,7 @@ def set_index_page(app):
             and not request.path.startswith("/_debug_toolbar")
         ):
             g.start_time = time.time()
+            g.request_id = generate_request_id()

             # to handle the referral url that has ?slref=code part
             ref_code = request.args.get("slref")
@@ -293,6 +296,7 @@ def set_index_page(app):
             newrelic.agent.record_custom_event(
                 "HttpResponseStatus", {"code": res.status_code}
             )
+        send_version_event("app")
         return res

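The new before-request hook stores a per-request identifier on flask.g; the helper comes from app.request_utils, whose implementation is not part of this diff. A minimal sketch of what such a helper could look like, assuming a random identifier is all that is needed:

# Hypothetical sketch of app/request_utils.py; only generate_request_id appears
# in the diff, everything else here is an assumption.
import uuid

def generate_request_id() -> str:
    # A random identifier that can be attached to logs for a single request.
    return uuid.uuid4().hex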
@@ -442,7 +446,12 @@ def init_admin(app):
     admin = Admin(name="SimpleLogin", template_mode="bootstrap4")

     admin.init_app(app, index_view=SLAdminIndexView())
-    admin.add_view(EmailSearchAdmin(name="Email Search", endpoint="email_search"))
+    admin.add_view(EmailSearchAdmin(name="Email Search", endpoint="admin.email_search"))
+    admin.add_view(
+        CustomDomainSearchAdmin(
+            name="Custom domain search", endpoint="admin.custom_domain_search"
+        )
+    )
     admin.add_view(UserAdmin(User, Session))
     admin.add_view(AliasAdmin(Alias, Session))
     admin.add_view(MailboxAdmin(Mailbox, Session))
@@ -492,9 +501,9 @@ def register_custom_commands(app):
         from init_app import add_sl_domains, add_proton_partner

         LOG.w("reset db, add fake data")
+        add_proton_partner()
         fake_data()
         add_sl_domains()
-        add_proton_partner()

     @app.cli.command("send-newsletter")
     @click.option("-n", "--newsletter_id", type=int, help="Newsletter ID to be sent")
@@ -576,7 +585,8 @@ def local_main():
     # enable flask toolbar
     from flask_debugtoolbar import DebugToolbarExtension

-    app.config["DEBUG_TB_PROFILER_ENABLED"] = True
+    # Disabled in python 3.12 as it collides with the default CPython profiler
+    app.config["DEBUG_TB_PROFILER_ENABLED"] = False
     app.config["DEBUG_TB_INTERCEPT_REDIRECTS"] = False
     app.debug = True
     DebugToolbarExtension(app)
app/templates/admin/custom_domain_search.html (new file, 181 lines)
@@ -0,0 +1,181 @@
|
{% extends 'admin/master.html' %}
|
||||||
|
|
||||||
|
{% block head_css %}
|
||||||
|
|
||||||
|
{{ super() }}
|
||||||
|
<style>
|
||||||
|
.card-shadow {
|
||||||
|
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.15);
|
||||||
|
border-radius: 8px;
|
||||||
|
}
|
||||||
|
.domain-title {
|
||||||
|
background-color: #007bff;
|
||||||
|
color: white;
|
||||||
|
padding: 10px;
|
||||||
|
border-radius: 8px 8px 0 0;
|
||||||
|
}
|
||||||
|
.status-icon {
|
||||||
|
font-size: 1.2em;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
{% endblock %}
|
||||||
|
{% macro show_user(user) -%}
|
||||||
|
<h4>
|
||||||
|
User <a href="/admin/email_search?email={{ user.email }}">{{ user.email }}</a> with ID {{ user.id }}.
|
||||||
|
</h4>
|
||||||
|
<table class="table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">User ID</th>
|
||||||
|
<th scope="col">Email</th>
|
||||||
|
<th scope="col">Verified</th>
|
||||||
|
<th scope="col">Status</th>
|
||||||
|
<th scope="col">Paid</th>
|
||||||
|
<th scope="col">Premium</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>{{ user.id }}</td>
|
||||||
|
<td>
|
||||||
|
<a href="/admin/email_search?email={{ user.email }}">{{ user.email }}</a>
|
||||||
|
</td>
|
||||||
|
{% if user.activated %}
|
||||||
|
|
||||||
|
<td class="text-success">Activated</td>
|
||||||
|
{% else %}
|
||||||
|
<td class="text-warning">Pending</td>
|
||||||
|
{% endif %}
|
||||||
|
{% if user.disabled %}
|
||||||
|
|
||||||
|
<td class="text-danger">Disabled</td>
|
||||||
|
{% else %}
|
||||||
|
<td class="text-success">Enabled</td>
|
||||||
|
{% endif %}
|
||||||
|
<td>{{ "yes" if user.is_paid() else "No" }}</td>
|
||||||
|
<td>{{ "yes" if user.is_premium() else "No" }}</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
{%- endmacro %}
|
||||||
|
{% macro show_verification(title, expected, errors) -%}
|
||||||
|
{% if not expected %}
|
||||||
|
|
||||||
|
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||||
|
<h5>{{ title }}</h5>
|
||||||
|
<span class="text-success status-icon"><i class="fa fa-check-circle"></i></span>
|
||||||
|
</li>
|
||||||
|
{% else %}
|
||||||
|
<li class="list-group-item">
|
||||||
|
<h5>{{ title }}</h5>
|
||||||
|
<p>
|
||||||
|
<strong>Expected:</strong> {{ expected.recommended }}
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
<strong>Allowed:</strong>
|
||||||
|
<ul>
|
||||||
|
{% for expected_record in expected.allowed %}<li>{{ expected_record }}</li>{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
<strong>Current response:</strong>
|
||||||
|
</p>
|
||||||
|
{% for error in errors %}
|
||||||
|
|
||||||
|
<ul class="list-group">
|
||||||
|
<li class="list-group-item">{{ error }}</li>
|
||||||
|
</ul>
|
||||||
|
{% endfor %}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{%- endmacro %}
|
||||||
|
{% macro show_mx_verification(title, expected, errors) -%}
|
||||||
|
{% if not expected %}
|
||||||
|
|
||||||
|
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||||
|
<h5>{{ title }}</h5>
|
||||||
|
<span class="text-success status-icon"><i class="fa fa-check-circle"></i></span>
|
||||||
|
</li>
|
||||||
|
{% else %}
|
||||||
|
<li class="list-group-item">
|
||||||
|
<h5>{{ title }}</h5>
|
||||||
|
<ul>
|
||||||
|
<li class="list-group-item">
|
||||||
|
{% for prio in expected %}
|
||||||
|
|
||||||
|
<p>
|
||||||
|
<strong>Priority {{ prio }}:</strong> {{ expected[prio].recommended }}
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
<strong>Allowed:</strong>
|
||||||
|
<ul>
|
||||||
|
{% for expected_record in expected[prio].allowed %}<li>{{ expected_record }}</li>{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
<strong>Current response:</strong>
|
||||||
|
</p>
|
||||||
|
{% for error in errors %}
|
||||||
|
|
||||||
|
<ul class="list-group">
|
||||||
|
<li class="list-group-item">{{ error }}</li>
|
||||||
|
</ul>
|
||||||
|
{% endfor %}
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{%- endmacro %}
|
||||||
|
{% macro show_domain(domain_with_data) -%}
|
||||||
|
<div class="col-md-3 mb-4">
|
||||||
|
<div class="card card-shadow">
|
||||||
|
<div class="domain-title text-center">
|
||||||
|
<h4>Domain {{ domain_with_data.domain.domain }}</h4>
|
||||||
|
</div>
|
||||||
|
<div class="card-body">
|
||||||
|
{% set domain = domain_with_data.domain %}
|
||||||
|
<ul class="list-group">
|
||||||
|
{{ show_verification("Ownership", domain_with_data.ownership_expected, domain_with_data.ownership_validation.errors) }}
|
||||||
|
{{ show_mx_verification("MX", domain_with_data.mx_expected, domain_with_data.mx_validation.errors) }}
|
||||||
|
{{ show_verification("SPF", domain_with_data.spf_expected, domain_with_data.spf_validation.errors) }}
|
||||||
|
{% for dkim_domain in domain_with_data.dkim_expected %}
|
||||||
|
|
||||||
|
{{ show_verification("DKIM {}.{}".format(dkim_domain, domain.domain) , domain_with_data.dkim_expected[dkim_domain], [domain_with_data.dkim_validation.get(dkim_domain+"."+domain.domain,'')]) }}
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{%- endmacro %}
|
||||||
|
{% block body %}
|
||||||
|
|
||||||
|
<div class="border border-dark border-2 mt-1 mb-2 p-3">
|
||||||
|
<form method="get">
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="email">User or domain to search:</label>
|
||||||
|
<input type="text"
|
||||||
|
class="form-control"
|
||||||
|
name="user"
|
||||||
|
value="{{ query or '' }}" />
|
||||||
|
</div>
|
||||||
|
<button type="submit" class="btn btn-primary">Submit</button>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
{% if data.no_match and query %}
|
||||||
|
|
||||||
|
<div class="border border-dark border-2 mt-1 mb-2 p-3 alert alert-warning"
|
||||||
|
role="alert">No user, alias or mailbox found for {{ query }}</div>
|
||||||
|
{% endif %}
|
||||||
|
{% if data.user %}
|
||||||
|
|
||||||
|
<div class="border border-dark border-2 mt-1 mb-2 p-3">
|
||||||
|
<h3 class="mb-3">Found User {{ data.user.email }}</h3>
|
||||||
|
{{ show_user(data.user) }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="row mt-4">
|
||||||
|
{% for domain_with_data in data.domains %}{{ show_domain(domain_with_data) }}{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
@@ -22,7 +22,7 @@
         <tr>
           <td>{{ user.id }}</td>
           <td>
-            <a href="?email={{ user.email }}">{{ user.email }}</a>
+            <a href="?query={{ user.email }}">{{ user.email }}</a>
           </td>
           {% if user.activated %}

@@ -43,8 +43,16 @@
           <td>{{ user.updated_at }}</td>
           {% if pu %}

-            <td>
-              <a href="?email={{ pu.partner_email }}">{{ pu.partner_email }}</a>
+            <td class="flex">
+              <a href="?query={{ pu.partner_email }}">{{ pu.partner_email }}</a>
+              <form class="d-inline"
+                    action="{{ url_for("admin.email_search.delete_partner_link") }}"
+                    method="POST">
+                <input type="hidden" name="user_id" value="{{ user.id }}">
+                <button type="submit"
+                        onclick="return confirm('Are you sure you would like to unlink the user?');"
+                        class="btn btn-danger d-inline">Unlink</button>
+              </form>
             </td>
           {% else %}
             <td>No</td>
@@ -72,7 +80,7 @@
         <tr>
           <td>{{ mailbox.id }}</td>
           <td>
-            <a href="?email={{ mailbox.email }}">{{ mailbox.email }}</a>
+            <a href="?query={{ mailbox.email }}">{{ mailbox.email }}</a>
           </td>
           <td>{{ "Yes" if mailbox.verified else "No" }}</td>
           <td>{{ mailbox.created_at }}</td>
@@ -101,7 +109,7 @@
         <tr>
           <td>{{ alias.id }}</td>
           <td>
-            <a href="?email={{ alias.email }}">{{ alias.email }}</a>
+            <a href="?query={{ alias.email }}">{{ alias.email }}</a>
           </td>
           <td>{{ "Yes" if alias.enabled else "No" }}</td>
           <td>{{ alias.created_at }}</td>
@@ -181,7 +189,7 @@
           <td>{{ entry.user_id }}</td>
           <td>{{ entry.alias_id }}</td>
           <td>
-            <a href="?email={{ entry.alias_email }}">{{ entry.alias_email }}</a>
+            <a href="?query={{ entry.alias_email }}">{{ entry.alias_email }}</a>
           </td>
           <td>{{ entry.action }}</td>
           <td>{{ entry.message }}</td>
@@ -207,7 +215,7 @@

         <tr>
           <td>
-            <a href="?email={{ entry.user_email }}">{{ entry.user_email }}</a>
+            <a href="?query={{ entry.user_email }}">{{ entry.user_email }}</a>
           </td>
           <td>{{ entry.action }}</td>
           <td>{{ entry.message }}</td>
@@ -222,10 +230,10 @@
   <div class="border border-dark border-2 mt-1 mb-2 p-3">
     <form method="get">
       <div class="form-group">
-        <label for="email">Email to search:</label>
+        <label for="email">UserID or Email to search:</label>
         <input type="text"
                class="form-control"
-               name="email"
+               name="query"
               value="{{ email or '' }}" />
       </div>
       <button type="submit" class="btn btn-primary">Submit</button>
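The new Unlink form posts a user_id to the endpoint named admin.email_search.delete_partner_link. The handler itself lives in app/admin_model.py and is not shown in this diff; a hypothetical sketch of its shape, for orientation only:

# Hypothetical sketch only; the real handler is not part of this diff.
from flask import redirect, request
from flask_admin import BaseView, expose

class EmailSearchAdmin(BaseView):
    @expose("/delete_partner_link", methods=["POST"])
    def delete_partner_link(self):
        user_id = request.form.get("user_id", type=int)
        # ... look up and delete the PartnerUser link for user_id here ...
        return redirect(request.referrer or "/admin")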
@@ -38,7 +38,7 @@
         Value: <em data-toggle="tooltip"
                    title="Click to copy"
                    class="clipboard"
-                   data-clipboard-text="{{ ownership_record }}">{{ ownership_record }}</em>
+                   data-clipboard-text="{{ ownership_records.recommended }}">{{ ownership_records.recommended }}</em>
       </div>
       <form method="post" action="#ownership-form">
         {{ csrf_form.csrf_token }}
@@ -91,7 +91,7 @@
         <br />
         Some domain registrars (Namecheap, CloudFlare, etc) might also use <em>@</em> for the root domain.
       </div>
-      {% for record in expected_mx_records %}
+      {% for prio in expected_mx_records %}

         <div class="mb-3 p-3 dns-record">
           Record: MX
@@ -99,12 +99,12 @@
           Domain: {{ custom_domain.domain }} or
           <b>@</b>
           <br />
-          Priority: {{ record.priority }}
+          Priority: {{ prio }}
           <br />
           Target: <em data-toggle="tooltip"
                       title="Click to copy"
                       class="clipboard"
-                      data-clipboard-text="{{ record.domain }}">{{ record.domain }}</em>
+                      data-clipboard-text="{{ expected_mx_records[prio].recommended }}">{{ expected_mx_records[prio].recommended }}</em>
         </div>
       {% endfor %}
       <form method="post" action="#mx-form">
@@ -251,8 +251,8 @@
         <em data-toggle="tooltip"
             title="Click to copy"
             class="clipboard"
-            data-clipboard-text="{{ dkim_cname_value }}."
-            style="overflow-wrap: break-word">{{ dkim_cname_value }}.</em>
+            data-clipboard-text="{{ dkim_cname_value.recommended }}."
+            style="overflow-wrap: break-word">{{ dkim_cname_value.recommended }}.</em>
       </div>
     {% endfor %}
     <div class="alert alert-info">
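These template changes imply that the DNS-record values passed to the page are no longer plain strings or record objects: ownership, SPF, and DKIM values now expose a recommended value (and, per the admin page earlier, an allowed list), and expected_mx_records is a mapping from priority to such an object. An illustrative Python sketch of the assumed shape, with placeholder hostnames:

# Illustrative only; field names beyond "recommended" and "allowed" are inferred
# from the templates above, and the hostnames are placeholders.
from dataclasses import dataclass, field
from typing import Dict, List

@dataclass
class ExpectedRecord:
    recommended: str                                    # the value shown and copied by default
    allowed: List[str] = field(default_factory=list)    # alternative accepted values

# expected_mx_records maps MX priority -> ExpectedRecord
expected_mx_records: Dict[int, ExpectedRecord] = {
    10: ExpectedRecord(recommended="mx1.example.com."),
    20: ExpectedRecord(recommended="mx2.example.com."),
}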
|
@ -57,31 +57,21 @@
|
|||||||
{% endblock %}
|
{% endblock %}
|
||||||
{% block default_content %}
|
{% block default_content %}
|
||||||
|
|
||||||
{% if NOW.timestamp < 1701475201 %}
|
{% if NOW.timestamp < 1733184000 %}
|
||||||
|
|
||||||
<div class="alert alert-info">
|
<div class="alert alert-primary">
|
||||||
Black Friday Deal: 33% off on the yearly plan for the <b>first</b> year ($20 instead of $30).
|
Lifetime deal for SimpleLogin Premium and Proton Pass Plus for $199
|
||||||
|
<a class="btn btn-primary"
|
||||||
|
href="https://proton.me/pass/black-friday"
|
||||||
|
target="_blank">Buy now</a>
|
||||||
<br>
|
<br>
|
||||||
Please use this coupon code
|
Available until December 3, 2024.
|
||||||
<em data-toggle="tooltip"
|
|
||||||
title="Click to copy"
|
|
||||||
class="clipboard"
|
|
||||||
data-clipboard-text="BF2023">BF2023</em> during the checkout.
|
|
||||||
<br>
|
|
||||||
<img src="/static/images/coupon.png" class="m-2" style="max-width: 300px">
|
|
||||||
<br>
|
|
||||||
Available until December 1, 2023.
|
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<div class="pb-8">
|
<div class="pb-8">
|
||||||
<div class="text-center mx-md-auto mb-8 mt-6">
|
<div class="text-center mx-md-auto mb-4 mt-4">
|
||||||
<h1>Upgrade to unlock premium features</h1>
|
<h1>Upgrade to unlock premium features</h1>
|
||||||
</div>
|
</div>
|
||||||
<div class="alert alert-info">
|
|
||||||
<span class="badge badge-success">new</span> SimpleLogin Premium now includes Proton Pass premium features.
|
|
||||||
<a href="https://simplelogin.io/blog/sl-premium-including-pass-plus/"
|
|
||||||
target="_blank">Learn more ↗</a>
|
|
||||||
</div>
|
|
||||||
{% if manual_sub %}
|
{% if manual_sub %}
|
||||||
|
|
||||||
<div class="alert alert-info mt-0 mb-6">
|
<div class="alert alert-info mt-0 mb-6">
|
||||||
@ -131,6 +121,11 @@
|
|||||||
aria-selected="true">Yearly<span class="badge badge-success position-absolute tab-yearly__badge"
|
aria-selected="true">Yearly<span class="badge badge-success position-absolute tab-yearly__badge"
|
||||||
style="font-size: 12px">Save $18</span></a>
|
style="font-size: 12px">Save $18</span></a>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="alert alert-info">
|
||||||
|
<span class="badge badge-success">new</span> SimpleLogin Premium now includes Proton Pass premium features.
|
||||||
|
<a href="https://simplelogin.io/blog/sl-premium-including-pass-plus/"
|
||||||
|
target="_blank">Learn more ↗</a>
|
||||||
|
</div>
|
||||||
<div class="tab-content mb-8">
|
<div class="tab-content mb-8">
|
||||||
<!-- monthly tab content -->
|
<!-- monthly tab content -->
|
||||||
<div class="tab-pane"
|
<div class="tab-pane"
|
||||||
@ -223,12 +218,12 @@
 <div class="card card-md flex-grow-1">
 <div class="card-body">
 <div class="text-center">
-<div class="h3">Proton plan</div>
+<div class="h3">Proton Unlimited</div>
 <div class="h3 my-3">Starts at $12.99 / month</div>
 <div class="text-center mt-4 mb-6">
 <a class="btn btn-lg btn-outline-primary w-100"
 role="button"
-href="https://account.proton.me/u/0/mail/upgrade"
+href="https://account.proton.me/u/0/pass/upgrade"
 target="_blank">Upgrade your Proton account</a>
 </div>
 </div>
|
|||||||
<div class="card card-md flex-grow-1">
|
<div class="card card-md flex-grow-1">
|
||||||
<div class="card-body">
|
<div class="card-body">
|
||||||
<div class="text-center">
|
<div class="text-center">
|
||||||
<div class="h3">Proton plan</div>
|
<div class="h3">Proton Unlimited</div>
|
||||||
<div class="h3 my-3">Starts at $119.88 / year</div>
|
<div class="h3 my-3">Starts at $119.88 / year</div>
|
||||||
<div class="text-center mt-4 mb-6">
|
<div class="text-center mt-4 mb-6">
|
||||||
<a class="btn btn-lg btn-outline-primary w-100"
|
<a class="btn btn-lg btn-outline-primary w-100"
|
||||||
role="button"
|
role="button"
|
||||||
href="https://account.proton.me/u/0/mail/upgrade"
|
href="https://account.proton.me/u/0/pass/upgrade"
|
||||||
target="_blank">Upgrade your Proton account</a>
|
target="_blank">Upgrade your Proton account</a>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@ -79,7 +79,14 @@
|
|||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if partner_sub %}<div>Premium subscription managed by {{ partner_name }}.</div>{% endif %}
|
{% if partner_sub %}
|
||||||
|
{% if partner_sub.lifetime %}
|
||||||
|
|
||||||
|
<div>Premium lifetime subscription managed by {{ partner_name }}.</div>
|
||||||
|
{% else %}
|
||||||
|
<div>Premium subscription managed by {{ partner_name }}.</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
{% elif current_user.in_trial() %}
|
{% elif current_user.in_trial() %}
|
||||||
Your Premium trial expires {{ current_user.trial_end | dt }}.
|
Your Premium trial expires {{ current_user.trial_end | dt }}.
|
||||||
{% else %}
|
{% else %}
|
||||||
@ -137,7 +144,7 @@
|
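Note: the settings template above now distinguishes a lifetime partner subscription from a recurring one by checking partner_sub.lifetime. A minimal sketch of the shape the template expects, assuming lifetime is a plain boolean on the object the view passes (the class below is illustrative, not taken from this diff):

    # Sketch only: the attribute the template branches on.
    from dataclasses import dataclass

    @dataclass
    class PartnerSubForTemplate:
        lifetime: bool  # True when the partner granted a lifetime deal rather than a recurring plan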
|||||||
</div>
|
</div>
|
||||||
<!-- END change name & profile picture -->
|
<!-- END change name & profile picture -->
|
||||||
<!-- Connect with Proton -->
|
<!-- Connect with Proton -->
|
||||||
{% if connect_with_proton %}
|
{% if connect_with_proton and can_unlink_proton_account %}
|
||||||
|
|
||||||
<div class="card" id="connect-with-proton">
|
<div class="card" id="connect-with-proton">
|
||||||
<div class="card-body">
|
<div class="card-body">
|
||||||
|
@ -549,7 +549,7 @@ def test_create_contact_route_free_users(flask_client):
|
|||||||
assert r.status_code == 201
|
assert r.status_code == 201
|
||||||
|
|
||||||
# End trial and disallow for new free users. Config should allow it
|
# End trial and disallow for new free users. Config should allow it
|
||||||
user.flags = User.FLAG_DISABLE_CREATE_CONTACTS
|
user.flags = User.FLAG_FREE_DISABLE_CREATE_CONTACTS
|
||||||
Session.commit()
|
Session.commit()
|
||||||
r = flask_client.post(
|
r = flask_client.post(
|
||||||
url_for("api.create_contact_route", alias_id=alias.id),
|
url_for("api.create_contact_route", alias_id=alias.id),
|
||||||
@ -647,8 +647,8 @@ def test_get_alias(flask_client):
|
|||||||
|
|
||||||
|
|
||||||
def test_is_reverse_alias(flask_client):
|
def test_is_reverse_alias(flask_client):
|
||||||
assert is_reverse_alias("ra+abcd@sl.local")
|
assert is_reverse_alias("ra+abcd@sl.lan")
|
||||||
assert is_reverse_alias("reply+abcd@sl.local")
|
assert is_reverse_alias("reply+abcd@sl.lan")
|
||||||
|
|
||||||
assert not is_reverse_alias("ra+abcd@test.org")
|
assert not is_reverse_alias("ra+abcd@test.org")
|
||||||
assert not is_reverse_alias("reply+abcd@test.org")
|
assert not is_reverse_alias("reply+abcd@test.org")
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
from flask import g
|
|
||||||
|
|
||||||
from app import config
|
from app import config
|
||||||
from app.alias_suffix import signer
|
from app.alias_suffix import signer
|
||||||
from app.alias_utils import delete_alias
|
from app.alias_utils import delete_alias
|
||||||
@ -7,7 +5,7 @@ from app.config import EMAIL_DOMAIN, MAX_NB_EMAIL_FREE_PLAN
|
|||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.models import Alias, CustomDomain, Mailbox, AliasUsedOn
|
from app.models import Alias, CustomDomain, Mailbox, AliasUsedOn
|
||||||
from app.utils import random_word
|
from app.utils import random_word
|
||||||
from tests.utils import login, random_domain, random_token
|
from tests.utils import fix_rate_limit_after_request, login, random_domain, random_token
|
||||||
|
|
||||||
|
|
||||||
def test_v2(flask_client):
|
def test_v2(flask_client):
|
||||||
@ -276,7 +274,7 @@ def test_too_many_requests(flask_client):
|
|||||||
|
|
||||||
# to make flask-limiter work with unit test
|
# to make flask-limiter work with unit test
|
||||||
# https://github.com/alisaifee/flask-limiter/issues/147#issuecomment-642683820
|
# https://github.com/alisaifee/flask-limiter/issues/147#issuecomment-642683820
|
||||||
g._rate_limiting_complete = False
|
fix_rate_limit_after_request()
|
||||||
else:
|
else:
|
||||||
# last request
|
# last request
|
||||||
assert r.status_code == 429
|
assert r.status_code == 429
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from flask import url_for, g
|
from flask import url_for
|
||||||
|
|
||||||
from app import config
|
from app import config
|
||||||
from app.config import EMAIL_DOMAIN, MAX_NB_EMAIL_FREE_PLAN
|
from app.config import EMAIL_DOMAIN, MAX_NB_EMAIL_FREE_PLAN
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.models import Alias, CustomDomain, AliasUsedOn
|
from app.models import Alias, CustomDomain, AliasUsedOn
|
||||||
from tests.utils import login, random_domain
|
from tests.utils import fix_rate_limit_after_request, login, random_domain
|
||||||
|
|
||||||
|
|
||||||
def test_with_hostname(flask_client):
|
def test_with_hostname(flask_client):
|
||||||
@ -17,7 +17,7 @@ def test_with_hostname(flask_client):
|
|||||||
)
|
)
|
||||||
|
|
||||||
assert r.status_code == 201
|
assert r.status_code == 201
|
||||||
assert r.json["alias"].endswith("d1.test")
|
assert r.json["alias"].endswith("d1.lan")
|
||||||
|
|
||||||
# make sure alias starts with the suggested prefix
|
# make sure alias starts with the suggested prefix
|
||||||
assert r.json["alias"].startswith("test")
|
assert r.json["alias"].startswith("test")
|
||||||
@ -133,7 +133,7 @@ def test_too_many_requests(flask_client):
|
|||||||
)
|
)
|
||||||
# to make flask-limiter work with unit test
|
# to make flask-limiter work with unit test
|
||||||
# https://github.com/alisaifee/flask-limiter/issues/147#issuecomment-642683820
|
# https://github.com/alisaifee/flask-limiter/issues/147#issuecomment-642683820
|
||||||
g._rate_limiting_complete = False
|
fix_rate_limit_after_request()
|
||||||
else:
|
else:
|
||||||
# last request
|
# last request
|
||||||
assert r.status_code == 429
|
assert r.status_code == 429
|
||||||
|
@ -112,14 +112,14 @@ def test_get_alias_infos_with_pagination_v3_no_duplicate_when_empty_contact(
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email="contact@example.com",
|
website_email="contact@example.com",
|
||||||
reply_email="rep@sl.local",
|
reply_email="rep@sl.lan",
|
||||||
)
|
)
|
||||||
|
|
||||||
Contact.create(
|
Contact.create(
|
||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email="contact2@example.com",
|
website_email="contact2@example.com",
|
||||||
reply_email="rep2@sl.local",
|
reply_email="rep2@sl.lan",
|
||||||
)
|
)
|
||||||
|
|
||||||
alias_infos = get_alias_infos_with_pagination_v3(user)
|
alias_infos = get_alias_infos_with_pagination_v3(user)
|
||||||
|
@ -15,7 +15,7 @@ def test_get_setting(flask_client):
|
|||||||
assert r.json == {
|
assert r.json == {
|
||||||
"alias_generator": "word",
|
"alias_generator": "word",
|
||||||
"notification": True,
|
"notification": True,
|
||||||
"random_alias_default_domain": "sl.local",
|
"random_alias_default_domain": "sl.lan",
|
||||||
"sender_format": "AT",
|
"sender_format": "AT",
|
||||||
"random_alias_suffix": "word",
|
"random_alias_suffix": "word",
|
||||||
}
|
}
|
||||||
@ -47,7 +47,7 @@ def test_update_settings_random_alias_default_domain(flask_client):
|
|||||||
custom_domain = CustomDomain.create(
|
custom_domain = CustomDomain.create(
|
||||||
domain=random_domain(), verified=True, user_id=user.id, flush=True
|
domain=random_domain(), verified=True, user_id=user.id, flush=True
|
||||||
)
|
)
|
||||||
assert user.default_random_alias_domain() == "sl.local"
|
assert user.default_random_alias_domain() == "sl.lan"
|
||||||
|
|
||||||
r = flask_client.patch(
|
r = flask_client.patch(
|
||||||
"/api/setting", json={"random_alias_default_domain": "invalid"}
|
"/api/setting", json={"random_alias_default_domain": "invalid"}
|
||||||
@ -55,10 +55,10 @@ def test_update_settings_random_alias_default_domain(flask_client):
|
|||||||
assert r.status_code == 400
|
assert r.status_code == 400
|
||||||
|
|
||||||
r = flask_client.patch(
|
r = flask_client.patch(
|
||||||
"/api/setting", json={"random_alias_default_domain": "d1.test"}
|
"/api/setting", json={"random_alias_default_domain": "d1.lan"}
|
||||||
)
|
)
|
||||||
assert r.status_code == 200
|
assert r.status_code == 200
|
||||||
assert user.default_random_alias_domain() == "d1.test"
|
assert user.default_random_alias_domain() == "d1.lan"
|
||||||
|
|
||||||
r = flask_client.patch(
|
r = flask_client.patch(
|
||||||
"/api/setting", json={"random_alias_default_domain": custom_domain.domain}
|
"/api/setting", json={"random_alias_default_domain": custom_domain.domain}
|
||||||
|
@ -2,7 +2,7 @@ from random import random
|
|||||||
|
|
||||||
from flask import url_for
|
from flask import url_for
|
||||||
|
|
||||||
from app import config
|
from app.constants import JobType
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.models import Job, ApiToCookieToken
|
from app.models import Job, ApiToCookieToken
|
||||||
from tests.api.utils import get_new_user_and_api_key
|
from tests.api.utils import get_new_user_and_api_key
|
||||||
@ -48,7 +48,7 @@ def test_delete_with_sudo(flask_client):
|
|||||||
jobs = Job.all()
|
jobs = Job.all()
|
||||||
assert len(jobs) == 1
|
assert len(jobs) == 1
|
||||||
job = jobs[0]
|
job = jobs[0]
|
||||||
assert job.name == config.JOB_DELETE_ACCOUNT
|
assert job.name == JobType.DELETE_ACCOUNT.value
|
||||||
assert job.payload == {"user_id": user.id}
|
assert job.payload == {"user_id": user.id}
|
||||||
|
|
||||||
|
|
||||||
|
@ -3,7 +3,7 @@ from flask import url_for
|
|||||||
from app import config
|
from app import config
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.models import User, PartnerUser
|
from app.models import User, PartnerUser
|
||||||
from app.proton.utils import get_proton_partner
|
from app.proton.proton_partner import get_proton_partner
|
||||||
from tests.api.utils import get_new_user_and_api_key
|
from tests.api.utils import get_new_user_and_api_key
|
||||||
from tests.utils import login, random_token, random_email
|
from tests.utils import login, random_token, random_email
|
||||||
|
|
||||||
|
@ -23,7 +23,7 @@ from init_app import add_sl_domains, add_proton_partner
|
|||||||
app = create_app()
|
app = create_app()
|
||||||
app.config["TESTING"] = True
|
app.config["TESTING"] = True
|
||||||
app.config["WTF_CSRF_ENABLED"] = False
|
app.config["WTF_CSRF_ENABLED"] = False
|
||||||
app.config["SERVER_NAME"] = "sl.test"
|
app.config["SERVER_NAME"] = "sl.lan"
|
||||||
|
|
||||||
# enable pg_trgm extension
|
# enable pg_trgm extension
|
||||||
with engine.connect() as conn:
|
with engine.connect() as conn:
|
||||||
|
@ -14,7 +14,7 @@ from app.models import (
|
|||||||
PartnerSubscription,
|
PartnerSubscription,
|
||||||
User,
|
User,
|
||||||
)
|
)
|
||||||
from app.proton.utils import get_proton_partner
|
from app.proton.proton_partner import get_proton_partner
|
||||||
from tests.utils import create_new_user, random_token
|
from tests.utils import create_new_user, random_token
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,10 +1,11 @@
|
|||||||
from flask import url_for
|
from flask import url_for
|
||||||
from app.models import Coupon
|
|
||||||
|
from app.models import Coupon, LifetimeCoupon
|
||||||
from app.utils import random_string
|
from app.utils import random_string
|
||||||
from tests.utils import login
|
from tests.utils import login
|
||||||
|
|
||||||
|
|
||||||
def test_use_coupon(flask_client):
|
def test_redeem_coupon_without_subscription(flask_client):
|
||||||
user = login(flask_client)
|
user = login(flask_client)
|
||||||
code = random_string(10)
|
code = random_string(10)
|
||||||
Coupon.create(code=code, nb_year=1, commit=True)
|
Coupon.create(code=code, nb_year=1, commit=True)
|
||||||
@ -14,7 +15,22 @@ def test_use_coupon(flask_client):
|
|||||||
data={"code": code},
|
data={"code": code},
|
||||||
)
|
)
|
||||||
|
|
||||||
assert r.status_code == 302
|
assert r.status_code == 200
|
||||||
coupon = Coupon.get_by(code=code)
|
coupon = Coupon.get_by(code=code)
|
||||||
assert coupon.used
|
assert coupon.used
|
||||||
assert coupon.used_by_user_id == user.id
|
assert coupon.used_by_user_id == user.id
|
||||||
|
|
||||||
|
|
||||||
|
def test_redeem_lifetime_coupon(flask_client):
|
||||||
|
login(flask_client)
|
||||||
|
code = random_string(10)
|
||||||
|
LifetimeCoupon.create(code=code, nb_used=1, commit=True)
|
||||||
|
|
||||||
|
r = flask_client.post(
|
||||||
|
url_for("dashboard.lifetime_licence"),
|
||||||
|
data={"code": code},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert r.status_code == 302
|
||||||
|
coupon = LifetimeCoupon.get_by(code=code)
|
||||||
|
assert coupon.nb_used == 0
|
||||||
|
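Note: several tests above now call fix_rate_limit_after_request(), imported from tests.utils, instead of setting g._rate_limiting_complete inline; the helper's body is not shown in this diff. A minimal sketch of what it could look like, assuming it simply wraps the flask-limiter workaround the tests previously inlined (the function name comes from the imports above; the body is an assumption):

    # tests/utils.py (sketch; not part of this diff)
    from flask import g

    def fix_rate_limit_after_request():
        # Allow flask-limiter to keep counting requests within a single unit test.
        # See https://github.com/alisaifee/flask-limiter/issues/147#issuecomment-642683820
        g._rate_limiting_complete = False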
Some files were not shown because too many files have changed in this diff.