Compare commits
34 Commits
SHA1
e47e5a5255, ed37325b32, dd6005ffdf, 664cd32f81, 33f0eb6c41, 9fd2fa9a78, 3c77f8af4b, 545eeda79b, 01dba12ed0, c872d43c3d, 3e6867bc17, a829074584, 25834e8f61, a62b43b7c4, 44fda2d94e, bc48198bb1, da6e56c4eb, 798b58529c, 3da6c983e1, 294232a329, fae9d7bc17, d666f5af3f, 556fae02d5, fd4c67c3d1, edef254529, 357f0cca57, 8ce90e27f7, 3ecc8d36f9, 14f4829fab, 63ac89e952, 8896f00124, d313c94f77, 39fcf2e48f, 41a5a65f51
66 changes: app/.github/workflows/main.yml (vendored)
@@ -1,6 +1,12 @@
-name: Test and lint
+name: SimpleLogin actions

-on: [push, pull_request]
+on:
+  push:
+    branches:
+      - master
+    tags:
+      - v*
+  pull_request:

 jobs:
   lint:
@@ -9,35 +15,34 @@ jobs:
       - name: Check out repo
        uses: actions/checkout@v3

-      - name: Install poetry
-        run: pipx install poetry
-
-      - uses: actions/setup-python@v4
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
        with:
-          python-version: '3.10'
-          cache: 'poetry'
+          # Install a specific version of uv.
+          version: "0.5.21"
+          enable-cache: true

       - name: Install OS dependencies
        if: ${{ matrix.python-version }} == '3.10'
        run: |
          sudo apt update
          sudo apt install -y libre2-dev libpq-dev

+      - name: "Set up Python"
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: "pyproject.toml"
+
       - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction
+        if: steps.setup-uv.outputs.cache-hit != 'true'
+        run: uv sync --locked --all-extras

       - name: Check formatting & linting
        run: |
-          poetry run pre-commit run --all-files
+          uv run pre-commit run --all-files


   test:
     runs-on: ubuntu-latest
     strategy:
       max-parallel: 4
       matrix:
         python-version: ["3.10"]

     # service containers to run with `postgres-job`
     services:
@@ -69,23 +74,26 @@ jobs:
       - name: Check out repo
        uses: actions/checkout@v3

-      - name: Install poetry
-        run: pipx install poetry
-
-      - uses: actions/setup-python@v4
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
        with:
-          python-version: ${{ matrix.python-version }}
-          cache: 'poetry'
+          # Install a specific version of uv.
+          version: "0.5.21"
+          enable-cache: true

       - name: Install OS dependencies
        if: ${{ matrix.python-version }} == '3.10'
        run: |
          sudo apt update
          sudo apt install -y libre2-dev libpq-dev

+      - name: "Set up Python"
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: "pyproject.toml"
+
       - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction
+        if: steps.setup-uv.outputs.cache-hit != 'true'
+        run: uv sync --locked --all-extras


       - name: Start Redis v6
@@ -95,7 +103,7 @@ jobs:

       - name: Run db migration
        run: |
-          CONFIG=tests/test.env poetry run alembic upgrade head
+          CONFIG=tests/test.env uv run alembic upgrade head

       - name: Prepare version file
        run: |
@@ -104,12 +112,12 @@ jobs:

       - name: Test with pytest
        run: |
-          poetry run pytest
+          uv run pytest
        env:
          GITHUB_ACTIONS_TEST: true

       - name: Archive code coverage results
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: code-coverage-report
          path: htmlcov
@@ -163,7 +171,7 @@ jobs:
        uses: docker/build-push-action@v3
        with:
          context: .
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
app/.pre-commit-config.yaml
@@ -8,7 +8,7 @@ repos:
       - id: check-yaml
       - id: trailing-whitespace
   - repo: https://github.com/Riverside-Healthcare/djLint
-    rev: v1.3.0
+    rev: v1.34.1
     hooks:
       - id: djlint-jinja
        files: '.*\.html'
@@ -21,5 +21,4 @@ repos:
       - id: ruff
        args: [ --fix ]
-      # Run the formatter.
       - id: ruff-format
1 change: app/.python-version (new file)
@@ -0,0 +1 @@
+3.10.16
@@ -20,7 +20,7 @@ SimpleLogin backend consists of 2 main components:
 ## Install dependencies

 The project requires:
-- Python 3.10 and [rye](https://github.com/astral-sh/rye) to manage dependencies
+- Python 3.10 and uv to manage dependencies
 - Node v10 for front-end.
 - Postgres 13+

@@ -28,7 +28,7 @@ First, install all dependencies by running the following command.
 Feel free to use `virtualenv` or similar tools to isolate the development environment.

 ```bash
-rye sync
+uv sync
 ```

 On Mac, sometimes you might need to install some other packages via `brew`:
@@ -55,7 +55,7 @@ brew install -s re2 pybind11
 We use pre-commit to run all our linting and static analysis checks. Please run

 ```bash
-rye run pre-commit install
+uv run pre-commit install
 ```

 to install it in your development environment.
@@ -160,25 +160,25 @@ Here are the small sum-ups of the directory structures and their roles:
 The code is formatted using [ruff](https://github.com/astral-sh/ruff); to format the code, simply run

 ```
-rye run ruff format .
+uv run ruff format .
 ```

 The code is also checked with `flake8`; make sure to run `flake8` before creating the pull request by

 ```bash
-rye run flake8
+uv run flake8
 ```

 For HTML templates, we use `djlint`. Before creating a pull request, please run

 ```bash
-rye run djlint --check templates
+uv run djlint --check templates
 ```

 If some files aren't properly formatted, you can format all files with

 ```bash
-rye run djlint --reformat .
+uv run djlint --reformat .
 ```

 ## Test sending email
@@ -223,6 +223,31 @@ Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you sho
 ## Job runner

 Some features require a job handler (such as GDPR data export). To test such a feature, you need to run the job_runner:

 ```bash
 python job_runner.py
 ```

+# Setup for Mac
+
+There are several ways to set up Python and manage the project dependencies on a Mac. For info, we have successfully used this setup on an Apple silicon Mac:
+
+```bash
+# we haven't managed to make python 3.12 work
+brew install python3.10
+
+# make sure to update the PATH so python, pip point to Python3
+# for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile
+
+# Although pipx is the recommended way to install uv,
+# installing pipx via brew will automatically install python 3.12
+# and uv will then use python 3.12
+# so we recommend installing uv this way instead
+curl -sSL https://install.python-uv.org | python3 -
+
+uv install
+
+# activate the virtualenv and you should be good to go!
+source .venv/bin/activate
+```
app/Dockerfile
@@ -4,43 +4,47 @@ WORKDIR /code
 COPY ./static/package*.json /code/static/
 RUN cd /code/static && npm ci

 # Main image
-FROM python:3.10
+FROM --platform=linux/amd64 ubuntu:22.04
+
+ARG UV_VERSION="0.5.21"
+ARG UV_HASH="e108c300eafae22ad8e6d94519605530f18f8762eb58d2b98a617edfb5d088fc"

 # Keeps Python from generating .pyc files in the container
-ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONDONTWRITEBYTECODE=1
 # Turns off buffering for easier container logging
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONUNBUFFERED=1

-# Add poetry to PATH
-ENV PATH="${PATH}:/root/.local/bin"
-
 WORKDIR /code

-# Copy poetry files
-COPY poetry.lock pyproject.toml ./
+# Copy dependency files
+COPY pyproject.toml uv.lock .python-version ./

-# Install and setup poetry
-RUN pip install -U pip \
-    && apt-get update \
-    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
-    && curl -sSL https://install.python-poetry.org | python3 - \
-    # Remove curl and netcat from the image
-    && apt-get purge -y curl netcat-traditional \
-    # Run poetry
-    && poetry config virtualenvs.create false \
-    && poetry install --no-interaction --no-ansi --no-root \
-    # Clear apt cache \
-    && apt-get purge -y libre2-dev cmake ninja-build\
+# Install deps
+RUN apt-get update \
+    && apt-get install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev build-essential pkg-config cmake ninja-build bash clang \
+    && curl -sSL "https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/uv-x86_64-unknown-linux-gnu.tar.gz" > uv.tar.gz \
+    && echo "${UV_HASH} uv.tar.gz" | sha256sum -c - \
+    && tar xf uv.tar.gz -C /tmp/ \
+    && mv /tmp/uv-x86_64-unknown-linux-gnu/uv /usr/bin/uv \
+    && mv /tmp/uv-x86_64-unknown-linux-gnu/uvx /usr/bin/uvx \
+    && rm -rf /tmp/uv* \
+    && rm -f uv.tar.gz \
+    && uv python install `cat .python-version` \
+    && uv sync --locked \
+    && apt-get autoremove -y \
+    && apt-get purge -y curl netcat-traditional build-essential pkg-config cmake ninja-build python3-dev clang \
     && apt-get autoremove -y \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

-# Copy code
-COPY . .
-
 # copy npm packages
 COPY --from=npm /code /code

+# copy everything else into /code
+COPY . .
+
+ENV PATH="/code/.venv/bin:$PATH"
 EXPOSE 7777

 #gunicorn wsgi:app -b 0.0.0.0:7777 -w 2 --timeout 15 --log-level DEBUG
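The new Dockerfile pins the uv download by version and SHA-256 (`sha256sum -c`) before unpacking it. Below is a minimal sketch of the same verify-before-use idea in Python, standard library only; the URL and checksum are placeholders, not the real release values:

```python
import hashlib
import urllib.request

# Placeholders for illustration only; substitute the real release URL
# and its published checksum.
URL = "https://example.com/uv-x86_64-unknown-linux-gnu.tar.gz"
EXPECTED_SHA256 = "0" * 64


def download_verified(url: str, expected_sha256: str, dest: str) -> None:
    """Download a file and keep it only if its SHA-256 digest matches."""
    data = urllib.request.urlopen(url).read()
    digest = hashlib.sha256(data).hexdigest()
    if digest != expected_sha256:
        raise RuntimeError(f"checksum mismatch: got {digest}")
    with open(dest, "wb") as f:
        f.write(data)


download_verified(URL, EXPECTED_SHA256, "uv.tar.gz")
```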
@@ -84,7 +84,7 @@ For email gurus, we have chosen 1024 key length instead of 2048 for DNS simplici

 ### DNS

-Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to to force using absolute domain.
+Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to force using absolute domain.


 #### MX record
app/SECURITY.md
@@ -7,8 +7,4 @@ If you want be up to date on security patches, make sure your SimpleLogin image

 ## Reporting a Vulnerability

-If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
-We will review the potential threat and fix it as fast as we can.
-
-We are incredibly thankful for people who disclose vulnerabilities, unfortunately we do not have a bounty program in place yet.
+If you want to report a vulnerability, please take a look at our bug bounty program at https://proton.me/security/bug-bounty.
app/app/account_linking.py
@@ -3,12 +3,17 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional

+import sqlalchemy.exc
 from arrow import Arrow
 from newrelic import agent
+from psycopg2.errors import UniqueViolation
 from sqlalchemy import or_

 from app.db import Session
 from app.email_utils import send_welcome_email
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import UserPlanChanged, EventContent
+from app.partner_user_utils import create_partner_user, create_partner_subscription
 from app.utils import sanitize_email, canonicalize_email
 from app.errors import (
     AccountAlreadyLinkedToAnotherPartnerException,
@@ -23,12 +28,14 @@ from app.models import (
     User,
     Alias,
 )
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import random_string


 class SLPlanType(Enum):
     Free = 1
     Premium = 2
+    PremiumLifetime = 3


 @dataclass
@@ -52,8 +59,26 @@ class LinkResult:
     strategy: str


+def send_user_plan_changed_event(
+    partner_user: PartnerUser,
+) -> UserPlanChanged:
+    subscription_end = partner_user.user.get_active_subscription_end(
+        include_partner_subscription=False
+    )
+    if partner_user.user.lifetime:
+        event = UserPlanChanged(lifetime=True)
+    elif subscription_end:
+        event = UserPlanChanged(plan_end_time=subscription_end.timestamp)
+    else:
+        event = UserPlanChanged(plan_end_time=None)
+    EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
+    Session.flush()
+    return event
+
+
 def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
     sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
+    is_lifetime = plan.type == SLPlanType.PremiumLifetime
     if plan.type == SLPlanType.Free:
         if sub is not None:
             LOG.i(
@@ -62,24 +87,37 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
             PartnerSubscription.delete(sub.id)
             agent.record_custom_event("PlanChange", {"plan": "free"})
     else:
+        end_time = plan.expiration
+        if plan.type == SLPlanType.PremiumLifetime:
+            end_time = None
         if sub is None:
             LOG.i(
-                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
+                f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] with {end_time} / {is_lifetime}"
             )
-            PartnerSubscription.create(
-                partner_user_id=partner_user.id,
-                end_at=plan.expiration,
+            create_partner_subscription(
+                partner_user=partner_user,
+                expiration=end_time,
+                lifetime=is_lifetime,
+                msg="Upgraded via partner. User did not have a previous partner subscription",
             )
             agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
         else:
-            if sub.end_at != plan.expiration:
-                LOG.i(
-                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
-                )
+            if sub.end_at != plan.expiration or sub.lifetime != is_lifetime:
                 agent.record_custom_event(
                     "PlanChange", {"plan": "premium", "type": "extension"}
                 )
-                sub.end_at = plan.expiration
+                sub.end_at = plan.expiration if not is_lifetime else None
+                sub.lifetime = is_lifetime
+                LOG.i(
+                    f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}] to {sub.end_at} / {sub.lifetime} "
+                )
+                emit_user_audit_log(
+                    user=partner_user.user,
+                    action=UserAuditLogAction.SubscriptionExtended,
+                    message="Extended partner subscription",
+                )
+    Session.flush()
+    send_user_plan_changed_event(partner_user)
     Session.commit()


@@ -98,12 +136,13 @@ def ensure_partner_user_exists_for_user(
     if res and res.partner_id != partner.id:
         raise AccountAlreadyLinkedToAnotherPartnerException()
     if not res:
-        res = PartnerUser.create(
-            user_id=sl_user.id,
+        res = create_partner_user(
+            user=sl_user,
             partner_id=partner.id,
             partner_email=link_request.email,
             external_user_id=link_request.external_user_id,
         )

         Session.commit()
         LOG.i(
             f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
@@ -131,17 +170,59 @@ class ClientMergeStrategy(ABC):

 class NewUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-        # Will create a new SL User with a random password
         canonical_email = canonicalize_email(self.link_request.email)
-        new_user = User.create(
-            email=canonical_email,
-            name=self.link_request.name,
-            password=random_string(20),
-            activated=True,
-            from_partner=self.link_request.from_partner,
-        )
+        try:
+            # Will create a new SL User with a random password
+            new_user = User.create(
+                email=canonical_email,
+                name=self.link_request.name,
+                password=random_string(20),
+                activated=True,
+                from_partner=self.link_request.from_partner,
+            )
+            self.create_partner_user(new_user)
+            Session.commit()
+
+            if not new_user.created_by_partner:
+                send_welcome_email(new_user)
+
+            agent.record_custom_event(
+                "PartnerUserCreation", {"partner": self.partner.name}
+            )
+
+            return LinkResult(
+                user=new_user,
+                strategy=self.__class__.__name__,
+            )
+        except (UniqueViolation, sqlalchemy.exc.IntegrityError) as e:
+            Session.rollback()
+            LOG.debug(f"Got the duplicate user error: {e}")
+            return self.create_missing_link(canonical_email)
+
+    def create_missing_link(self, canonical_email: str):
+        # If there's a unique key violation due to race conditions try to create only the partner if needed
+        partner_user = PartnerUser.get_by(
+            external_user_id=self.link_request.external_user_id,
+            partner_id=self.partner.id,
+        )
-        partner_user = PartnerUser.create(
-            user_id=new_user.id,
+        if partner_user is None:
+            # Get the user by canonical email and if not by normal email
+            user = User.get_by(email=canonical_email) or User.get_by(
+                email=self.link_request.email
+            )
+            if not user:
+                raise RuntimeError(
+                    "Tried to create only partner on UniqueViolation but cannot find the user"
+                )
+            partner_user = self.create_partner_user(user)
+            Session.commit()
+        return LinkResult(
+            user=partner_user.user, strategy=ExistingUnlinkedUserStrategy.__name__
+        )
+
+    def create_partner_user(self, new_user: User):
+        partner_user = create_partner_user(
+            user=new_user,
             partner_id=self.partner.id,
             external_user_id=self.link_request.external_user_id,
             partner_email=self.link_request.email,
@@ -153,17 +234,7 @@ class NewUserStrategy(ClientMergeStrategy):
             partner_user,
             self.link_request.plan,
         )
-        Session.commit()
-
-        if not new_user.created_by_partner:
-            send_welcome_email(new_user)
-
-        agent.record_custom_event("PartnerUserCreation", {"partner": self.partner.name})
-
-        return LinkResult(
-            user=new_user,
-            strategy=self.__class__.__name__,
-        )
+        return partner_user


 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
@@ -200,7 +271,7 @@ def get_login_strategy(
     return ExistingUnlinkedUserStrategy(link_request, user, partner)


-def check_alias(email: str) -> bool:
+def check_alias(email: str):
     alias = Alias.get_by(email=email)
     if alias is not None:
         raise AccountIsUsingAliasAsEmail()
@@ -275,10 +346,26 @@ def switch_already_linked_user(
         LOG.i(
             f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
         )
+
+        emit_user_audit_log(
+            user=other_partner_user.user,
+            action=UserAuditLogAction.UnlinkAccount,
+            message=f"Deleting partner_user {other_partner_user.id} (external_user_id={other_partner_user.external_user_id} | partner_email={other_partner_user.partner_email}) from user {current_user.id}, as we received a new link request for the same partner",
+        )
         PartnerUser.delete(other_partner_user.id)
     LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
     # Link this partner_user to the current user
+    emit_user_audit_log(
+        user=partner_user.user,
+        action=UserAuditLogAction.UnlinkAccount,
+        message=f"Unlinking from partner, as user will now be tied to another external account. old=(id={partner_user.user.id} | email={partner_user.user.email}) | new=(id={current_user.id} | email={current_user.email})",
+    )
     partner_user.user_id = current_user.id
+    emit_user_audit_log(
+        user=current_user,
+        action=UserAuditLogAction.LinkAccount,
+        message=f"Linking user {current_user.id} ({current_user.email}) to partner_user:{partner_user.id} (external_user_id={partner_user.external_user_id} | partner_email={partner_user.partner_email})",
+    )
     # Set plan
     set_plan_for_partner_user(partner_user, link_request.plan)
     Session.commit()
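The NewUserStrategy rewrite above wraps User.create in a try/except so a concurrent link request for the same email no longer fails: on UniqueViolation/IntegrityError it rolls back and adopts the row the other request created. A stripped-down sketch of that create-or-recover pattern with a generic SQLAlchemy model (the session and User model here are stand-ins, not the app's):

```python
from sqlalchemy.exc import IntegrityError


def create_or_recover(session, User, email: str):
    """Insert a row; if a concurrent insert wins the unique-key race, fetch its row instead."""
    try:
        user = User(email=email)
        session.add(user)
        session.commit()
        return user
    except IntegrityError:
        # Another transaction inserted the same email between our check and our commit.
        session.rollback()
        return session.query(User).filter_by(email=email).one()
```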
app/app/admin_model.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import Optional
+from typing import Optional, List

 import arrow
 import sqlalchemy
@@ -8,14 +8,18 @@ from flask_admin.form import SecureForm
 from flask_admin.model.template import EndpointLinkRowAction
 from markupsafe import Markup

-from app import models, s3
+from app import models, s3, config
 from flask import redirect, url_for, request, flash, Response
 from flask_admin import expose, AdminIndexView
 from flask_admin.actions import action
 from flask_admin.contrib import sqla
 from flask_login import current_user

+from app.custom_domain_validation import CustomDomainValidation, DomainValidationResult
 from app.db import Session
+from app.dns_utils import get_network_dns_client
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import EventContent, UserPlanChanged
 from app.models import (
     User,
     ManualSubscription,
@@ -33,8 +37,14 @@ from app.models import (
     Mailbox,
     DeletedAlias,
     DomainDeletedAlias,
     PartnerUser,
+    AliasMailbox,
+    AliasAuditLog,
+    UserAuditLog,
+    CustomDomain,
 )
 from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 def _admin_action_formatter(view, context, model, name):
@@ -111,7 +121,7 @@ class SLAdminIndexView(AdminIndexView):
         if not current_user.is_authenticated or not current_user.is_admin:
             return redirect(url_for("auth.login", next=request.url))

-        return redirect("/admin/user")
+        return redirect("/admin/email_search")


 class UserAdmin(SLModelView):
@@ -347,17 +357,42 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
                 manual_sub.end_at = manual_sub.end_at.shift(years=1)
             else:
                 manual_sub.end_at = arrow.now().shift(years=1, days=1)
+            emit_user_audit_log(
+                user=user,
+                action=UserAuditLogAction.Upgrade,
+                message=f"Admin {current_user.email} extended manual subscription to user {user.email}",
+            )
+            EventDispatcher.send_event(
+                user=user,
+                content=EventContent(
+                    user_plan_change=UserPlanChanged(
+                        plan_end_time=manual_sub.end_at.timestamp
+                    )
+                ),
+            )
             flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
-            continue
-
-        ManualSubscription.create(
-            user_id=user.id,
-            end_at=arrow.now().shift(years=1, days=1),
-            comment=way,
-            is_giveaway=is_giveaway,
-        )
-
-        flash(f"New {way} manual subscription for {user} is created", "success")
+        else:
+            emit_user_audit_log(
+                user=user,
+                action=UserAuditLogAction.Upgrade,
+                message=f"Admin {current_user.email} created manual subscription to user {user.email}",
+            )
+            manual_sub = ManualSubscription.create(
+                user_id=user.id,
+                end_at=arrow.now().shift(years=1, days=1),
+                comment=way,
+                is_giveaway=is_giveaway,
+            )
+            EventDispatcher.send_event(
+                user=user,
+                content=EventContent(
+                    user_plan_change=UserPlanChanged(
+                        plan_end_time=manual_sub.end_at.timestamp
+                    )
+                ),
+            )
+            flash(f"New {way} manual subscription for {user} is created", "success")
     Session.commit()


@@ -449,14 +484,7 @@ class ManualSubscriptionAdmin(SLModelView):
         "Extend 1 year more?",
     )
     def extend_1y(self, ids):
-        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
-            ms.end_at = ms.end_at.shift(years=1)
-            flash(f"Extend subscription for 1 year for {ms.user}", "success")
-            AdminAuditLog.extend_subscription(
-                current_user.id, ms.user.id, ms.end_at, "1 year"
-            )
-
-        Session.commit()
+        self.__extend_manual_subscription(ids, msg="1 year", years=1)

     @action(
         "extend_1m",
@@ -464,11 +492,26 @@
         "Extend 1 month more?",
     )
     def extend_1m(self, ids):
+        self.__extend_manual_subscription(ids, msg="1 month", months=1)
+
+    def __extend_manual_subscription(self, ids: List[int], msg: str, **kwargs):
         for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
-            ms.end_at = ms.end_at.shift(months=1)
-            flash(f"Extend subscription for 1 month for {ms.user}", "success")
+            sub: ManualSubscription = ms
+            sub.end_at = sub.end_at.shift(**kwargs)
+            flash(f"Extend subscription for {msg} for {sub.user}", "success")
+            emit_user_audit_log(
+                user=sub.user,
+                action=UserAuditLogAction.Upgrade,
+                message=f"Admin {current_user.email} extended manual subscription for {msg} for {sub.user}",
+            )
             AdminAuditLog.extend_subscription(
-                current_user.id, ms.user.id, ms.end_at, "1 month"
+                current_user.id, sub.user.id, sub.end_at, msg
             )
+            EventDispatcher.send_event(
+                user=sub.user,
+                content=EventContent(
+                    user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
+                ),
+            )

         Session.commit()
@@ -733,39 +776,79 @@ class InvalidMailboxDomainAdmin(SLModelView):


 class EmailSearchResult:
-    no_match: bool = True
-    alias: Optional[Alias] = None
-    mailbox: Optional[Mailbox] = None
-    deleted_alias: Optional[DeletedAlias] = None
-    deleted_custom_alias: Optional[DomainDeletedAlias] = None
-    user: Optional[User] = None
+    def __init__(self):
+        self.no_match: bool = True
+        self.alias: Optional[Alias] = None
+        self.alias_audit_log: Optional[List[AliasAuditLog]] = None
+        self.mailbox: List[Mailbox] = []
+        self.mailbox_count: int = 0
+        self.deleted_alias: Optional[DeletedAlias] = None
+        self.deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
+        self.domain_deleted_alias: Optional[DomainDeletedAlias] = None
+        self.domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
+        self.user: Optional[User] = None
+        self.user_audit_log: Optional[List[UserAuditLog]] = None
+        self.query: str

     @staticmethod
-    def from_email(email: str) -> EmailSearchResult:
+    def from_request_email(email: str) -> EmailSearchResult:
         output = EmailSearchResult()
+        output.query = email
         alias = Alias.get_by(email=email)
         if alias:
             output.alias = alias
+            output.alias_audit_log = (
+                AliasAuditLog.filter_by(alias_id=alias.id)
+                .order_by(AliasAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
             return output
-        user = User.get_by(email=email)
+        try:
+            user_id = int(email)
+            user = User.get(user_id)
+        except ValueError:
+            user = User.get_by(email=email)
         if user:
             output.user = user
+            output.user_audit_log = (
+                UserAuditLog.filter_by(user_id=user.id)
+                .order_by(UserAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
             return output
-        mailbox = Mailbox.get_by(email=email)
-        if mailbox:
-            output.mailbox = mailbox
+
+        user_audit_log = (
+            UserAuditLog.filter_by(user_email=email)
+            .order_by(UserAuditLog.created_at.desc())
+            .all()
+        )
+        if user_audit_log:
+            output.user_audit_log = user_audit_log
+            output.no_match = False
+        mailboxes = (
+            Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
+        )
+        if mailboxes:
+            output.mailbox = mailboxes
+            output.mailbox_count = Mailbox.filter_by(email=email).count()
             output.no_match = False
             return output
         deleted_alias = DeletedAlias.get_by(email=email)
         if deleted_alias:
             output.deleted_alias = deleted_alias
+            output.deleted_alias_audit_log = (
+                AliasAuditLog.filter_by(alias_email=deleted_alias.email)
+                .order_by(AliasAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
             return output
         domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
         if domain_deleted_alias:
             output.domain_deleted_alias = domain_deleted_alias
+            output.domain_deleted_alias_audit_log = (
+                AliasAuditLog.filter_by(alias_email=domain_deleted_alias.email)
+                .order_by(AliasAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
             return output

@@ -782,16 +865,41 @@ class EmailSearchHelpers:

     @staticmethod
     def mailbox_count(user: User) -> int:
-        return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.asc()).count()
+        return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()

     @staticmethod
+    def alias_mailboxes(alias: Alias) -> list[Mailbox]:
+        return (
+            Session.query(Mailbox)
+            .filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
+            .union(
+                Session.query(Mailbox)
+                .join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
+                .filter(AliasMailbox.alias_id == alias.id)
+            )
+            .order_by(Mailbox.id)
+            .limit(10)
+            .all()
+        )
+
+    @staticmethod
+    def alias_mailbox_count(alias: Alias) -> int:
+        return len(alias.mailboxes)
+
+    @staticmethod
     def alias_list(user: User) -> list[Alias]:
-        return Alias.filter_by(user_id=user.id).order_by(Alias.id.asc()).limit(10).all()
+        return (
+            Alias.filter_by(user_id=user.id).order_by(Alias.id.desc()).limit(10).all()
+        )

     @staticmethod
     def alias_count(user: User) -> int:
         return Alias.filter_by(user_id=user.id).count()

     @staticmethod
     def partner_user(user: User) -> Optional[PartnerUser]:
         return PartnerUser.get_by(user_id=user.id)


 class EmailSearchAdmin(BaseView):
     def is_accessible(self):
@@ -805,11 +913,10 @@ class EmailSearchAdmin(BaseView):
     @expose("/", methods=["GET", "POST"])
     def index(self):
         search = EmailSearchResult()
-        email = ""
-        if request.form and request.form["email"]:
-            email = request.form["email"]
+        email = request.args.get("email")
+        if email is not None and len(email) > 0:
             email = email.strip()
-            search = EmailSearchResult.from_email(email)
+            search = EmailSearchResult.from_request_email(email)

         return self.render(
             "admin/email_search.html",
@@ -817,3 +924,106 @@
             data=search,
             helper=EmailSearchHelpers,
         )
+
+
+class CustomDomainWithValidationData:
+    def __init__(self, domain: CustomDomain):
+        self.domain: CustomDomain = domain
+        self.ownership_expected: Optional[str] = None
+        self.ownership_validation: Optional[DomainValidationResult] = None
+        self.mx_expected: Optional[str] = None
+        self.mx_validation: Optional[DomainValidationResult] = None
+        self.spf_expected: Optional[str] = None
+        self.spf_validation: Optional[DomainValidationResult] = None
+        self.dkim_expected: {str: str} = {}
+        self.dkim_validation: {str: str} = {}
+
+
+class CustomDomainSearchResult:
+    def __init__(self):
+        self.no_match: bool = False
+        self.user: Optional[User] = None
+        self.domains: list[CustomDomainWithValidationData] = []
+
+    @staticmethod
+    def from_user(user: Optional[User]) -> CustomDomainSearchResult:
+        out = CustomDomainSearchResult()
+        if user is None:
+            out.no_match = True
+            return out
+        out.user = user
+        dns_client = get_network_dns_client()
+        validator = CustomDomainValidation(
+            dkim_domain=config.EMAIL_DOMAIN,
+            partner_domains=config.PARTNER_DNS_CUSTOM_DOMAINS,
+            partner_domains_validation_prefixes=config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES,
+            dns_client=dns_client,
+        )
+        for custom_domain in user.custom_domains:
+            validation_data = CustomDomainWithValidationData(custom_domain)
+            if not custom_domain.ownership_verified:
+                validation_data.ownership_expected = (
+                    validator.get_ownership_verification_record(custom_domain)
+                )
+                validation_data.ownership_validation = (
+                    validator.validate_domain_ownership(custom_domain)
+                )
+            if not custom_domain.verified:
+                validation_data.mx_expected = validator.get_expected_mx_records(
+                    custom_domain
+                )
+                validation_data.mx_validation = validator.validate_mx_records(
+                    custom_domain
+                )
+            if not custom_domain.spf_verified:
+                validation_data.spf_expected = validator.get_expected_spf_record(
+                    custom_domain
+                )
+                validation_data.spf_validation = validator.validate_spf_records(
+                    custom_domain
+                )
+            if not custom_domain.dkim_verified:
+                validation_data.dkim_expected = validator.get_dkim_records(
+                    custom_domain
+                )
+                validation_data.dkim_validation = validator.validate_dkim_records(
+                    custom_domain
+                )
+            out.domains.append(validation_data)
+            print(validation_data.dkim_expected, validation_data.dkim_validation)
+
+        return out
+
+
+class CustomDomainSearchAdmin(BaseView):
+    def is_accessible(self):
+        return current_user.is_authenticated and current_user.is_admin
+
+    def inaccessible_callback(self, name, **kwargs):
+        # redirect to login page if user doesn't have access
+        flash("You don't have access to the admin page", "error")
+        return redirect(url_for("dashboard.index", next=request.url))
+
+    @expose("/", methods=["GET", "POST"])
+    def index(self):
+        query = request.args.get("user")
+        if query is None:
+            search = CustomDomainSearchResult()
+        else:
+            try:
+                user_id = int(query)
+                user = User.get_by(id=user_id)
+            except ValueError:
+                user = User.get_by(email=query)
+            if user is None:
+                cd = CustomDomain.get_by(domain=query)
+                if cd is not None:
+                    user = cd.user
+            search = CustomDomainSearchResult.from_user(user)
+            print("NEW", search.domains)
+
+        return self.render(
+            "admin/custom_domain_search.html",
+            data=search,
+            query=query,
+        )
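Both from_request_email and the new CustomDomainSearchAdmin.index accept either a numeric id or an email, trying int() first and falling back on ValueError. The same dispatch isolated in a sketch (the function name is illustrative, not part of the codebase):

```python
from typing import Optional, Tuple


def parse_admin_query(query: str) -> Tuple[Optional[int], Optional[str]]:
    """Return (user_id, None) when the query is numeric, else (None, email)."""
    try:
        return int(query), None
    except ValueError:
        return None, query


assert parse_admin_query("1234") == (1234, None)
assert parse_admin_query("user@example.com") == (None, "user@example.com")
```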
38 changes: app/app/alias_audit_log_utils.py (new file)
@@ -0,0 +1,38 @@
+from enum import Enum
+from typing import Optional
+
+from app.models import Alias, AliasAuditLog
+
+
+class AliasAuditLogAction(Enum):
+    CreateAlias = "create"
+    ChangeAliasStatus = "change_status"
+    DeleteAlias = "delete"
+    UpdateAlias = "update"
+
+    InitiateTransferAlias = "initiate_transfer_alias"
+    AcceptTransferAlias = "accept_transfer_alias"
+    TransferredAlias = "transferred_alias"
+
+    ChangedMailboxes = "changed_mailboxes"
+
+    CreateContact = "create_contact"
+    UpdateContact = "update_contact"
+    DeleteContact = "delete_contact"
+
+
+def emit_alias_audit_log(
+    alias: Alias,
+    action: AliasAuditLogAction,
+    message: str,
+    user_id: Optional[int] = None,
+    commit: bool = False,
+):
+    AliasAuditLog.create(
+        user_id=user_id or alias.user_id,
+        alias_id=alias.id,
+        alias_email=alias.email,
+        action=action.value,
+        message=message,
+        commit=commit,
+    )
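A usage sketch for the new helper, assuming an Alias row already loaded elsewhere (the `alias` variable is hypothetical):

```python
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction

# `alias` is assumed to be an existing Alias instance.
emit_alias_audit_log(
    alias=alias,
    action=AliasAuditLogAction.UpdateAlias,
    message="Alias fields updated (note)",
    commit=True,  # persist immediately instead of relying on the caller's commit
)
```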
61 changes: app/app/alias_mailbox_utils.py (new file)
@@ -0,0 +1,61 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import List, Optional
+
+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
+from app.db import Session
+from app.models import Alias, AliasMailbox, Mailbox
+
+_MAX_MAILBOXES_PER_ALIAS = 20
+
+
+class CannotSetMailboxesForAliasCause(Enum):
+    Forbidden = "Forbidden"
+    EmptyMailboxes = "Must choose at least one mailbox"
+    TooManyMailboxes = "Too many mailboxes"
+
+
+@dataclass
+class SetMailboxesForAliasResult:
+    performed_change: bool
+    reason: Optional[CannotSetMailboxesForAliasCause]
+
+
+def set_mailboxes_for_alias(
+    user_id: int, alias: Alias, mailbox_ids: List[int]
+) -> Optional[CannotSetMailboxesForAliasCause]:
+    if len(mailbox_ids) == 0:
+        return CannotSetMailboxesForAliasCause.EmptyMailboxes
+    if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
+        return CannotSetMailboxesForAliasCause.TooManyMailboxes
+
+    mailboxes = (
+        Session.query(Mailbox)
+        .filter(
+            Mailbox.id.in_(mailbox_ids),
+            Mailbox.user_id == user_id,
+            Mailbox.verified == True,  # noqa: E712
+        )
+        .all()
+    )
+    if len(mailboxes) != len(mailbox_ids):
+        return CannotSetMailboxesForAliasCause.Forbidden
+
+    # first remove all existing alias-mailboxes links
+    AliasMailbox.filter_by(alias_id=alias.id).delete()
+    Session.flush()
+
+    # then add all new mailboxes, the first one being the one associated with the alias
+    for i, mailbox in enumerate(mailboxes):
+        if i == 0:
+            alias.mailbox_id = mailboxes[0].id
+        else:
+            AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
+
+    emit_alias_audit_log(
+        alias=alias,
+        action=AliasAuditLogAction.ChangedMailboxes,
+        message=",".join([f"{mailbox.id} ({mailbox.email})" for mailbox in mailboxes]),
+    )
+
+    return None
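Callers check the returned enum rather than catching exceptions; update_alias in the API layer below uses exactly this shape. A usage sketch (`user` and `alias` are assumed to be loaded elsewhere, and the mailbox ids are illustrative):

```python
from app.alias_mailbox_utils import set_mailboxes_for_alias

err = set_mailboxes_for_alias(user_id=user.id, alias=alias, mailbox_ids=[1, 2])
if err:
    # e.g. "Must choose at least one mailbox" or "Too many mailboxes"
    print(err.value)
```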
app/app/alias_suffix.py
@@ -58,7 +58,7 @@ def verify_prefix_suffix(

     # alias_domain must be either one of user custom domains or built-in domains
     if alias_domain not in user.available_alias_domains(alias_options=alias_options):
-        LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+        LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
         return False

     # SimpleLogin domain case:
@@ -75,17 +75,17 @@ def verify_prefix_suffix(
         and not config.DISABLE_ALIAS_SUFFIX
     ):
         if not alias_domain_prefix.startswith("."):
-            LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
+            LOG.i("User %s submits a wrong alias suffix %s", user, alias_suffix)
             return False

     else:
         if alias_domain not in user_custom_domains:
             if not config.DISABLE_ALIAS_SUFFIX:
-                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

             if alias_domain not in available_sl_domains:
-                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

     return True
app/app/alias_utils.py
@@ -1,12 +1,14 @@
 import csv
 from io import StringIO
 import re
 from dataclasses import dataclass
 from typing import Optional, Tuple

 from email_validator import validate_email, EmailNotValidError
 from sqlalchemy.exc import IntegrityError, DataError
 from flask import make_response

+from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
 from app.config import (
     BOUNCE_PREFIX_FOR_REPLY_PHASE,
     BOUNCE_PREFIX,
@@ -23,6 +25,7 @@ from app.email_utils import (
     send_cannot_create_domain_alias,
     send_email,
     render,
+    sl_formataddr,
 )
 from app.errors import AliasInTrashError
 from app.events.event_dispatcher import EventDispatcher
@@ -30,6 +33,7 @@ from app.events.generated.event_pb2 import (
     AliasDeleted,
     AliasStatusChanged,
     EventContent,
+    AliasCreated,
 )
 from app.log import LOG
 from app.models import (
@@ -330,7 +334,10 @@ def try_auto_create_via_domain(address: str) -> Optional[Alias]:


 def delete_alias(
-    alias: Alias, user: User, reason: AliasDeleteReason = AliasDeleteReason.Unspecified
+    alias: Alias,
+    user: User,
+    reason: AliasDeleteReason = AliasDeleteReason.Unspecified,
+    commit: bool = False,
 ):
     """
     Delete an alias and add it to either global or domain trash
@@ -360,12 +367,21 @@ def delete_alias(
         Session.commit()
         LOG.i(f"Moving {alias} to global trash {deleted_alias}")

+    alias_id = alias.id
+    alias_email = alias.email
+
+    emit_alias_audit_log(
+        alias, AliasAuditLogAction.DeleteAlias, "Alias deleted by user action"
+    )
     Alias.filter(Alias.id == alias.id).delete()
     Session.commit()

     EventDispatcher.send_event(
-        user, EventContent(alias_deleted=AliasDeleted(alias_id=alias.id))
+        user,
+        EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
     )
+    if commit:
+        Session.commit()


 def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
@@ -439,7 +455,7 @@ def alias_export_csv(user, csv_direct_export=False):
     return output


-def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
+def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
     # cannot transfer alias which is used for receiving newsletter
     if User.get_by(newsletter_alias_id=alias.id):
         raise Exception("Cannot transfer alias that's used to receive newsletter")
@@ -493,17 +509,90 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
     alias.disable_pgp = False
     alias.pinned = False

+    emit_alias_audit_log(
+        alias=alias,
+        action=AliasAuditLogAction.TransferredAlias,
+        message=f"Lost ownership of alias due to alias transfer confirmed. New owner is {new_user.id}",
+        user_id=old_user.id,
+    )
+    EventDispatcher.send_event(
+        old_user,
+        EventContent(
+            alias_deleted=AliasDeleted(
+                id=alias.id,
+                email=alias.email,
+            )
+        ),
+    )
+
+    emit_alias_audit_log(
+        alias=alias,
+        action=AliasAuditLogAction.AcceptTransferAlias,
+        message=f"Accepted alias transfer from user {old_user.id}",
+        user_id=new_user.id,
+    )
+    EventDispatcher.send_event(
+        new_user,
+        EventContent(
+            alias_created=AliasCreated(
+                id=alias.id,
+                email=alias.email,
+                note=alias.note,
+                enabled=alias.enabled,
+                created_at=int(alias.created_at.timestamp),
+            )
+        ),
+    )
+
     Session.commit()


-def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
+def change_alias_status(
+    alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
+):
     LOG.i(f"Changing alias {alias} enabled to {enabled}")
     alias.enabled = enabled

     event = AliasStatusChanged(
-        alias_id=alias.id, alias_email=alias.email, enabled=enabled
+        id=alias.id,
+        email=alias.email,
+        enabled=enabled,
+        created_at=int(alias.created_at.timestamp),
     )
     EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
+    audit_log_message = f"Set alias status to {enabled}"
+    if message is not None:
+        audit_log_message += f". {message}"
+    emit_alias_audit_log(
+        alias, AliasAuditLogAction.ChangeAliasStatus, audit_log_message
+    )

     if commit:
         Session.commit()


+@dataclass
+class AliasRecipientName:
+    name: str
+    message: Optional[str] = None
+
+
+def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
+    """
+    Logic:
+    1. If alias has name, use it
+    2. If alias has custom domain, and custom domain has name, use it
+    3. Otherwise, use the alias email as the recipient
+    """
+    if alias.name:
+        return AliasRecipientName(
+            name=sl_formataddr((alias.name, alias.email)),
+            message=f"Put alias name {alias.name} in from header",
+        )
+    elif alias.custom_domain:
+        if alias.custom_domain.name:
+            return AliasRecipientName(
+                name=sl_formataddr((alias.custom_domain.name, alias.email)),
+                message=f"Put domain default alias name {alias.custom_domain.name} in from header",
+            )
+    return AliasRecipientName(name=alias.email)
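The extended change_alias_status threads an optional message into the audit-log entry; toggle_alias in the API layer below calls it with exactly this shape. A usage sketch (`alias` is assumed to be loaded elsewhere):

```python
from app import alias_utils

alias_utils.change_alias_status(
    alias,
    enabled=False,
    message="Disabled by admin action",  # appended to the audit-log message
    commit=True,
)
```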
app/app/api/views/alias.py
@@ -1,9 +1,13 @@
+from typing import Optional
+
 from deprecated import deprecated
 from flask import g
 from flask import jsonify
 from flask import request

 from app import alias_utils
+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
+from app.alias_mailbox_utils import set_mailboxes_for_alias
 from app.api.base import api_bp, require_api_auth
 from app.api.serializer import (
     AliasInfo,
@@ -26,7 +30,7 @@ from app.errors import (
 )
 from app.extensions import limiter
 from app.log import LOG
-from app.models import Alias, Contact, Mailbox, AliasMailbox, AliasDeleteReason
+from app.models import Alias, Contact, Mailbox, AliasDeleteReason


 @deprecated
@@ -185,7 +189,11 @@ def toggle_alias(alias_id):
     if not alias or alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

-    alias_utils.change_alias_status(alias, enabled=not alias.enabled)
+    alias_utils.change_alias_status(
+        alias,
+        enabled=not alias.enabled,
+        message=f"Set enabled={not alias.enabled} via API",
+    )
     LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
     Session.commit()

@@ -272,10 +280,12 @@ def update_alias(alias_id):
     if not alias or alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

+    changed_fields = []
     changed = False
     if "note" in data:
         new_note = data.get("note")
         alias.note = new_note
+        changed_fields.append("note")
         changed = True

     if "mailbox_id" in data:
@@ -285,35 +295,22 @@
             return jsonify(error="Forbidden"), 400

         alias.mailbox_id = mailbox_id
+        changed_fields.append(f"mailbox_id ({mailbox_id})")
         changed = True

     if "mailbox_ids" in data:
-        mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
-        mailboxes: [Mailbox] = []
-
-        # check if all mailboxes belong to user
-        for mailbox_id in mailbox_ids:
-            mailbox = Mailbox.get(mailbox_id)
-            if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
-                return jsonify(error="Forbidden"), 400
-            mailboxes.append(mailbox)
-
-        if not mailboxes:
-            return jsonify(error="Must choose at least one mailbox"), 400
-
-        # <<< update alias mailboxes >>>
-        # first remove all existing alias-mailboxes links
-        AliasMailbox.filter_by(alias_id=alias.id).delete()
-        Session.flush()
-
-        # then add all new mailboxes
-        for i, mailbox in enumerate(mailboxes):
-            if i == 0:
-                alias.mailbox_id = mailboxes[0].id
-            else:
-                AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
-        # <<< END update alias mailboxes >>>
+        try:
+            mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
+        except ValueError:
+            return jsonify(error="Invalid mailbox_id"), 400
+        err = set_mailboxes_for_alias(
+            user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
+        )
+        if err:
+            return jsonify(error=err.value), 400

+        mailbox_ids_string = ",".join(map(str, mailbox_ids))
+        changed_fields.append(f"mailbox_ids ({mailbox_ids_string})")
         changed = True

     if "name" in data:
@@ -325,17 +322,26 @@
         if new_name:
             new_name = new_name.replace("\n", "")
         alias.name = new_name
+        changed_fields.append("name")
         changed = True

     if "disable_pgp" in data:
         alias.disable_pgp = data.get("disable_pgp")
+        changed_fields.append("disable_pgp")
         changed = True

     if "pinned" in data:
         alias.pinned = data.get("pinned")
+        changed_fields.append("pinned")
         changed = True

     if changed:
+        changed_fields_string = ",".join(changed_fields)
+        emit_alias_audit_log(
+            alias,
+            AliasAuditLogAction.UpdateAlias,
+            f"Alias fields updated ({changed_fields_string})",
+        )
         Session.commit()

     return jsonify(ok=True), 200
@@ -416,15 +422,14 @@ def create_contact_route(alias_id):
     if not data:
         return jsonify(error="request body cannot be empty"), 400

-    alias: Alias = Alias.get(alias_id)
-
-    if alias.user_id != g.user.id:
+    alias: Optional[Alias] = Alias.get_by(id=alias_id, user_id=g.user.id)
+    if not alias:
         return jsonify(error="Forbidden"), 403

     contact_address = data.get("contact")

     try:
-        contact = create_contact(g.user, alias, contact_address)
+        contact = create_contact(alias, contact_address)
     except ErrContactErrorUpgradeNeeded as err:
         return jsonify(error=err.error_for_user()), 403
     except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
@@ -446,11 +451,16 @@ def delete_contact(contact_id):
         200
     """
     user = g.user
-    contact = Contact.get(contact_id)
+    contact: Optional[Contact] = Contact.get(contact_id)

     if not contact or contact.alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

+    emit_alias_audit_log(
+        alias=contact.alias,
+        action=AliasAuditLogAction.DeleteContact,
+        message=f"Deleted contact {contact_id} ({contact.email})",
+    )
     Contact.delete(contact_id)
     Session.commit()

@@ -468,12 +478,17 @@ def toggle_contact(contact_id):
         200
     """
     user = g.user
-    contact = Contact.get(contact_id)
+    contact: Optional[Contact] = Contact.get(contact_id)

     if not contact or contact.alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

     contact.block_forward = not contact.block_forward
+    emit_alias_audit_log(
+        alias=contact.alias,
+        action=AliasAuditLogAction.UpdateContact,
+        message=f"Set contact state {contact.id} {contact.email} -> {contact.website_email} to blocked {contact.block_forward}",
+    )
     Session.commit()

     return jsonify(block_forward=contact.block_forward), 200
app/app/api/views/auth.py
@@ -23,6 +23,7 @@ from app.events.auth_event import LoginEvent, RegisterEvent
 from app.extensions import limiter
 from app.log import LOG
 from app.models import User, ApiKey, SocialAuth, AccountActivation
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_email, canonicalize_email


@@ -52,8 +53,12 @@ def auth_login():
     password = data.get("password")
     device = data.get("device")

-    email = sanitize_email(data.get("email"))
-    canonical_email = canonicalize_email(data.get("email"))
+    email = data.get("email")
+    if not email:
+        LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
+        return jsonify(error="Email or password incorrect"), 400
+    email = sanitize_email(email)
+    canonical_email = canonicalize_email(email)

     user = User.get_by(email=email) or User.get_by(email=canonical_email)

@@ -183,6 +188,11 @@ def auth_activate():

     LOG.d("activate user %s", user)
     user.activated = True
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.ActivateUser,
+        message=f"User has been activated: {user.email}",
+    )
     AccountActivation.delete(account_activation.id)
     Session.commit()
app/app/api/views/custom_domain.py
@@ -2,8 +2,10 @@ from flask import g, request
 from flask import jsonify

 from app.api.base import api_bp, require_api_auth
+from app.custom_domain_utils import set_custom_domain_mailboxes
 from app.db import Session
-from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
+from app.log import LOG
+from app.models import CustomDomain, DomainDeletedAlias


 def custom_domain_to_dict(custom_domain: CustomDomain):
@@ -100,23 +102,14 @@ def update_custom_domain(custom_domain_id):

     if "mailbox_ids" in data:
         mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
-        if mailbox_ids:
-            # check if mailbox is not tempered with
-            mailboxes = []
-            for mailbox_id in mailbox_ids:
-                mailbox = Mailbox.get(mailbox_id)
-                if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
-                    return jsonify(error="Forbidden"), 400
-                mailboxes.append(mailbox)
-
-            # first remove all existing domain-mailboxes links
-            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
-            Session.flush()
-
-            for mailbox in mailboxes:
-                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
-
+        result = set_custom_domain_mailboxes(user.id, custom_domain, mailbox_ids)
+        if result.success:
             changed = True
+        else:
+            LOG.info(
+                f"Prevented from updating mailboxes [custom_domain_id={custom_domain.id}]: {result.reason.value}"
+            )
+            return jsonify(error="Forbidden"), 400

     if changed:
         Session.commit()
app/app/api/views/mailbox.py
@@ -6,12 +6,7 @@ from flask import request

 from app import mailbox_utils
 from app.api.base import api_bp, require_api_auth
-from app.dashboard.views.mailbox_detail import verify_mailbox_change
 from app.db import Session
-from app.email_utils import (
-    mailbox_already_used,
-    email_can_be_used_as_mailbox,
-)
 from app.models import Mailbox
 from app.utils import sanitize_email

@@ -38,7 +33,11 @@ def create_mailbox():
         the new mailbox dict
     """
     user = g.user
-    mailbox_email = sanitize_email(request.get_json().get("email"))
+    email = request.get_json().get("email")
+    if not email:
+        return jsonify(error="Invalid email"), 400
+
+    mailbox_email = sanitize_email(email)

     try:
         new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
@@ -118,20 +117,10 @@ def update_mailbox(mailbox_id):

     if "email" in data:
         new_email = sanitize_email(data.get("email"))

-        if mailbox_already_used(new_email, user):
-            return jsonify(error=f"{new_email} already used"), 400
-        elif not email_can_be_used_as_mailbox(new_email):
-            return (
-                jsonify(
-                    error=f"{new_email} cannot be used. Please note a mailbox cannot "
-                    f"be a disposable email address"
-                ),
-                400,
-            )
-
         try:
-            verify_mailbox_change(user, mailbox, new_email)
+            mailbox_utils.request_mailbox_email_change(user, mailbox, new_email)
+        except mailbox_utils.MailboxError as e:
+            return jsonify(error=e.msg), 400
         except SMTPRecipientsRefused:
             return jsonify(error=f"Incorrect mailbox, please recheck {new_email}"), 400
         else:
@@ -141,7 +130,7 @@ def update_mailbox(mailbox_id):
     if "cancel_email_change" in data:
         cancel_email_change = data.get("cancel_email_change")
         if cancel_email_change:
-            mailbox.new_email = None
+            mailbox_utils.cancel_email_change(mailbox.id, user)
             changed = True

     if changed:
@ -1,3 +1,4 @@
from email_validator import EmailNotValidError
from flask import g
from flask import jsonify, request

@ -93,12 +94,15 @@ def new_custom_alias_v2():
            400,
        )

    alias = Alias.create(
        user_id=user.id,
        email=full_alias,
        mailbox_id=user.default_mailbox_id,
        note=note,
    )
    try:
        alias = Alias.create(
            user_id=user.id,
            email=full_alias,
            mailbox_id=user.default_mailbox_id,
            note=note,
        )
    except EmailNotValidError:
        return jsonify(error="Email is not valid"), 400

    Session.commit()

@ -153,8 +157,17 @@ def new_custom_alias_v3():
    if not isinstance(data, dict):
        return jsonify(error="request body does not follow the required format"), 400

    alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
    alias_prefix_data = data.get("alias_prefix", "") or ""

    if not isinstance(alias_prefix_data, str):
        return jsonify(error="request body does not follow the required format"), 400

    alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
    signed_suffix = data.get("signed_suffix", "") or ""

    if not isinstance(signed_suffix, str):
        return jsonify(error="request body does not follow the required format"), 400

    signed_suffix = signed_suffix.strip()

    mailbox_ids = data.get("mailbox_ids")
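The new_custom_alias_v3 hunk above hardens JSON parsing: an explicit null comes back as None from data.get, and a non-string value would raise AttributeError (an HTTP 500) inside .strip(); both now yield a clean 400. A standalone sketch of the same defensive pattern (hypothetical helper, not SimpleLogin code):

# Sketch of the defensive-parsing pattern used above.
def parse_prefix(data: dict) -> str | None:
    """Return a normalized prefix, or None if the payload is malformed."""
    raw = data.get("alias_prefix", "") or ""  # treats an explicit null as ""
    if not isinstance(raw, str):  # rejects numbers, lists, objects
        return None
    return raw.strip().lower().replace(" ", "")

assert parse_prefix({}) == ""
assert parse_prefix({"alias_prefix": None}) == ""
assert parse_prefix({"alias_prefix": " My Prefix "}) == "myprefix"
assert parse_prefix({"alias_prefix": 123}) is None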
@ -144,5 +144,6 @@ def get_available_domains_for_random_alias_v2():
@require_api_auth
def unlink_proton_account():
    user = g.user
    perform_proton_account_unlink(user)
    if not perform_proton_account_unlink(user):
        return jsonify(error="The account cannot be unlinked"), 400
    return jsonify({"ok": True})
@ -6,6 +6,7 @@ from app import config
from app.extensions import limiter
from app.log import LOG
from app.models import Job, ApiToCookieToken
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


@api_bp.route("/user", methods=["DELETE"])

@ -16,6 +17,11 @@ def delete_user():

    """
    # Schedule delete account job
    emit_user_audit_log(
        user=g.user,
        action=UserAuditLogAction.UserMarkedForDeletion,
        message=f"Marked user {g.user.id} ({g.user.email}) for deletion from API",
    )
    LOG.w("schedule delete account job for %s", g.user)
    Job.create(
        name=config.JOB_DELETE_ACCOUNT,
@ -87,7 +87,7 @@ def update_user_info():
            File.delete(file.id)
            s3.delete(file.path)
            Session.flush()
        else:
            if data["profile_picture"] is not None:
                raw_data = base64.decodebytes(data["profile_picture"].encode())
                if detect_image_format(raw_data) == ImageFormat.Unknown:
                    return jsonify(error="Unsupported image format"), 400
@ -7,6 +7,7 @@ from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import ActivationCode
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_next_url


@ -47,6 +48,11 @@ def activate():

    user = activation_code.user
    user.activated = True
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.ActivateUser,
        message=f"User has been activated: {user.email}",
    )
    login_user(user)

    # activation code is to be used only once
@ -10,6 +10,7 @@ from app.events.auth_event import LoginEvent
from app.extensions import limiter
from app.log import LOG
from app.models import User
from app.pw_models import PasswordOracle
from app.utils import sanitize_email, sanitize_next_url, canonicalize_email


@ -43,6 +44,13 @@ def login():
        user = User.get_by(email=email) or User.get_by(email=canonical_email)

        if not user or not user.check_password(form.password.data):
            if not user:
                # Do the hash to avoid timing attacks nevertheless
                dummy_pw = PasswordOracle()
                dummy_pw.password = (
                    "$2b$12$ZWqpL73h4rGNfLkJohAFAu0isqSw/bX9p/tzpbWRz/To5FAftaW8u"
                )
                dummy_pw.check_password(form.password.data)
            # Trigger rate limiter
            g.deduct_limit = True
            form.password.data = None
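The dummy bcrypt hash above keeps the unknown-user path roughly as slow as the known-user path, so response timing does not reveal whether an email exists. A standalone sketch of the idea, assuming the bcrypt package (the diff itself goes through PasswordOracle):

import bcrypt  # assumed dependency for this sketch

# Any valid bcrypt hash will do; the comparison result for unknown users is discarded.
_DUMMY_HASH = bcrypt.hashpw(b"dummy", bcrypt.gensalt())

def check_login(user, candidate_password: str) -> bool:
    if user is None:
        # Burn roughly the same bcrypt cost as a real check so that
        # "unknown email" and "wrong password" take similar time.
        bcrypt.checkpw(candidate_password.encode(), _DUMMY_HASH)
        return False
    # Assumes user.password_hash holds the stored bcrypt hash as bytes.
    return bcrypt.checkpw(candidate_password.encode(), user.password_hash)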
@ -9,6 +9,7 @@ from app.auth.views.login_utils import after_login
from app.db import Session
from app.extensions import limiter
from app.models import ResetPasswordCode
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class ResetPasswordForm(FlaskForm):

@ -59,6 +60,11 @@ def reset_password():

    # this can be served to activate user too
    user.activated = True
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.ResetPassword,
        message="User has reset their password",
    )

    # remove all reset password codes
    ResetPasswordCode.filter_by(user_id=user.id).delete()
@ -35,6 +35,33 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
    return literal_eval(value)


def get_env_dict(env_var: str) -> dict[str, str]:
    """
    Get an env variable and convert it into a python dictionary with keys and values as strings.
    Args:
        env_var (str): env var, example: SL_DB

    Syntax is: key1=value1;key2=value2
    Components separated by ;
    key and value separated by =
    """
    value = os.getenv(env_var)
    if not value:
        return {}

    components = value.split(";")
    result = {}
    for component in components:
        if component == "":
            continue
        parts = component.split("=")
        if len(parts) != 2:
            raise Exception(f"Invalid config for env var {env_var}")
        result[parts[0].strip()] = parts[1].strip()

    return result


config_file = os.environ.get("CONFIG")
if config_file:
    config_file = get_abs_path(config_file)
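A worked example of the get_env_dict contract described in the docstring above (the env var value is made up for illustration):

import os

os.environ["SL_DB"] = "key1=value1;key2=value2"  # hypothetical value
# get_env_dict splits on ";" then on "=", stripping whitespace around both parts:
#   get_env_dict("SL_DB") == {"key1": "value1", "key2": "value2"}
#   get_env_dict("UNSET_VAR") == {}
# A component like "key1=value1=extra" splits into three parts and raises
# Exception("Invalid config for env var SL_DB").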
@ -282,6 +309,7 @@ JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
JOB_SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"

# for pagination
PAGE_LIMIT = 20

@ -574,7 +602,6 @@ SKIP_MX_LOOKUP_ON_CHECK = False

DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ

SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))

UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)

@ -596,12 +623,45 @@ def read_webhook_enabled_user_ids() -> Optional[List[int]]:
        return None

    ids = []
    for id in user_ids.split(","):
    for user_id in user_ids.split(","):
        try:
            ids.append(int(id.strip()))
            ids.append(int(user_id.strip()))
        except ValueError:
            pass
    return ids


EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_ids()

# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
# It defaults to the regular DB_URI in case it's needed
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)


def read_partner_dict(var: str) -> dict[int, str]:
    partner_value = get_env_dict(var)
    if len(partner_value) == 0:
        return {}

    res: dict[int, str] = {}
    for partner_id in partner_value.keys():
        try:
            partner_id_int = int(partner_id.strip())
            res[partner_id_int] = partner_value[partner_id]
        except ValueError:
            pass
    return res


PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
    "PARTNER_DNS_CUSTOM_DOMAINS"
)
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
    "PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
)

MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
    "MAILBOX_VERIFICATION_OVERRIDE_CODE", None
)

AUDIT_LOG_MAX_DAYS = int(os.environ.get("AUDIT_LOG_MAX_DAYS", 30))
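read_partner_dict layers on get_env_dict and keeps only keys that parse as integers, so the partner settings map partner ids to strings. A worked example (values invented for illustration):

# Hypothetical environment value:
#   PARTNER_DNS_CUSTOM_DOMAINS="1=partner-mail.example.com;2=other.example.net"
# read_partner_dict("PARTNER_DNS_CUSTOM_DOMAINS")
#   -> {1: "partner-mail.example.com", 2: "other.example.net"}
# A component whose key is not an integer (e.g. "abc=x.example.net") is silently
# skipped (the ValueError is swallowed), so a typo drops an entry rather than
# crashing app startup.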
@ -1 +1,2 @@
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
138
app/app/contact_utils.py
Normal file
@ -0,0 +1,138 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional

from sqlalchemy.exc import IntegrityError

from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.db import Session
from app.email_utils import generate_reply_email, parse_full_address
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Contact, Alias
from app.utils import sanitize_email


class ContactCreateError(Enum):
    InvalidEmail = "Invalid email"
    NotAllowed = "Your plan does not allow to create contacts"
    Unknown = "Unknown error when trying to create contact"


@dataclass
class ContactCreateResult:
    contact: Optional[Contact]
    created: bool
    error: Optional[ContactCreateError]


def __update_contact_if_needed(
    contact: Contact, name: Optional[str], mail_from: Optional[str]
) -> ContactCreateResult:
    if name and contact.name != name:
        LOG.d(f"Setting {contact} name to {name}")
        contact.name = name
        Session.commit()
    if mail_from and contact.mail_from is None:
        LOG.d(f"Setting {contact} mail_from to {mail_from}")
        contact.mail_from = mail_from
        Session.commit()
    return ContactCreateResult(contact, created=False, error=None)


def create_contact(
    email: str,
    alias: Alias,
    name: Optional[str] = None,
    mail_from: Optional[str] = None,
    allow_empty_email: bool = False,
    automatic_created: bool = False,
    from_partner: bool = False,
) -> ContactCreateResult:
    # If user cannot create contacts, they still need to be created when receiving an email for an alias
    if not automatic_created and not alias.user.can_create_contacts():
        return ContactCreateResult(
            None, created=False, error=ContactCreateError.NotAllowed
        )
    # Parse emails with form 'name <email>'
    try:
        email_name, email = parse_full_address(email)
    except ValueError:
        email = ""
        email_name = ""
    # If no name is explicitly given try to get it from the parsed email
    if name is None:
        name = email_name[: Contact.MAX_NAME_LENGTH]
    else:
        name = name[: Contact.MAX_NAME_LENGTH]
    # If still no name is there, make sure the name is None instead of empty string
    if not name:
        name = None
    if name is not None and "\x00" in name:
        LOG.w("Cannot use contact name because has \\x00")
        name = ""
    # Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
    email = sanitize_email(email, not_lower=True)
    if not is_valid_email(email):
        LOG.w(f"invalid contact email {email}")
        if not allow_empty_email:
            return ContactCreateResult(
                None, created=False, error=ContactCreateError.InvalidEmail
            )
        LOG.d("Create a contact with invalid email for %s", alias)
        # either reuse a contact with empty email or create a new contact with empty email
        email = ""
    # If contact exists, update name and mail_from if needed
    contact = Contact.get_by(alias_id=alias.id, website_email=email)
    if contact is not None:
        return __update_contact_if_needed(contact, name, mail_from)
    # Create the contact
    reply_email = generate_reply_email(email, alias)
    alias_id = alias.id
    try:
        flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
        is_invalid_email = email == ""
        contact = Contact.create(
            user_id=alias.user_id,
            alias_id=alias.id,
            website_email=email,
            name=name,
            reply_email=reply_email,
            mail_from=mail_from,
            automatic_created=automatic_created,
            flags=flags,
            invalid_email=is_invalid_email,
            commit=True,
        )
        contact_id = contact.id
        if automatic_created:
            trail = ". Automatically created"
        else:
            trail = ". Created by user action"
        emit_alias_audit_log(
            alias=alias,
            action=AliasAuditLogAction.CreateContact,
            message=f"Created contact {contact_id} ({email}){trail}",
            commit=True,
        )
        LOG.d(
            f"Created contact {contact} for alias {alias} with email {email} invalid_email={is_invalid_email}"
        )
        return ContactCreateResult(contact, created=True, error=None)
    except IntegrityError:
        Session.rollback()
        LOG.info(
            f"Contact with email {email} for alias_id {alias_id} already existed, fetching from DB"
        )
        contact: Optional[Contact] = Contact.get_by(
            alias_id=alias_id, website_email=email
        )
        if contact:
            return __update_contact_if_needed(contact, name, mail_from)
        else:
            LOG.warning(
                f"Could not find contact with email {email} for alias_id {alias_id} and it should exist"
            )
            return ContactCreateResult(
                None, created=False, error=ContactCreateError.Unknown
            )
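A sketch of how a caller consumes ContactCreateResult, mirroring the dashboard refactor further down in this compare. Here `alias` is assumed to be an existing Alias row; this is illustrative, not code from the diff:

from app.contact_utils import create_contact, ContactCreateError

result = create_contact(email="Alice <alice@example.com>", alias=alias)
if result.error == ContactCreateError.NotAllowed:
    print("free plan cannot create contacts; suggest an upgrade")
elif result.error is not None:
    print("invalid address or unknown failure")
elif not result.created:
    print(f"contact already existed, reverse alias: {result.contact.reply_email}")
else:
    print(f"new reverse alias: {result.contact.reply_email}")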
127
app/app/coupon_utils.py
Normal file
@ -0,0 +1,127 @@
from typing import Optional

import arrow
from sqlalchemy import or_, update, and_

from app.config import ADMIN_EMAIL
from app.db import Session
from app.email_utils import send_email
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User, ManualSubscription, Coupon, LifetimeCoupon
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class CouponUserCannotRedeemError(Exception):
    pass


def redeem_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
    if user.lifetime:
        LOG.i(f"User {user} is a lifetime SL user. Cannot redeem coupons")
        raise CouponUserCannotRedeemError()

    sub = user.get_active_subscription()
    if sub and not isinstance(sub, ManualSubscription):
        LOG.i(
            f"User {user} has an active subscription that is not manual. Cannot redeem coupon {coupon_code}"
        )
        raise CouponUserCannotRedeemError()

    coupon = Coupon.get_by(code=coupon_code)
    if not coupon:
        LOG.i(f"User is trying to redeem coupon {coupon_code} that does not exist")
        return None

    now = arrow.utcnow()
    stmt = (
        update(Coupon)
        .where(
            and_(
                Coupon.code == coupon_code,
                Coupon.used == False,  # noqa: E712
                or_(
                    Coupon.expires_date == None,  # noqa: E711
                    Coupon.expires_date > now,
                ),
            )
        )
        .values(used=True, used_by_user_id=user.id, updated_at=now)
    )
    res = Session.execute(stmt)
    if res.rowcount == 0:
        LOG.i(f"Coupon {coupon.id} could not be redeemed. It's expired or invalid.")
        return None

    LOG.i(
        f"Redeemed normal coupon {coupon.id} for {coupon.nb_year} years by user {user}"
    )
    if sub:
        # renew existing subscription
        if sub.end_at > arrow.now():
            sub.end_at = sub.end_at.shift(years=coupon.nb_year)
        else:
            sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
    else:
        sub = ManualSubscription.create(
            user_id=user.id,
            end_at=arrow.now().shift(years=coupon.nb_year, days=1),
            comment="using coupon code",
            is_giveaway=coupon.is_giveaway,
            commit=True,
        )
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.Upgrade,
        message=f"User {user} redeemed coupon {coupon.id} for {coupon.nb_year} years",
    )
    EventDispatcher.send_event(
        user=user,
        content=EventContent(
            user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
        ),
    )
    Session.commit()
    return coupon


def redeem_lifetime_coupon(coupon_code: str, user: User) -> Optional[Coupon]:
    coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=coupon_code)
    if not coupon:
        return None

    stmt = (
        update(LifetimeCoupon)
        .where(
            and_(
                LifetimeCoupon.code == coupon_code,
                LifetimeCoupon.nb_used > 0,
            )
        )
        .values(nb_used=LifetimeCoupon.nb_used - 1)
    )
    res = Session.execute(stmt)
    if res.rowcount == 0:
        LOG.i("Coupon could not be redeemed")
        return None

    user.lifetime = True
    user.lifetime_coupon_id = coupon.id
    if coupon.paid:
        user.paid_lifetime = True
    EventDispatcher.send_event(
        user=user,
        content=EventContent(user_plan_change=UserPlanChanged(lifetime=True)),
    )
    Session.commit()

    # notify admin
    send_email(
        ADMIN_EMAIL,
        subject=f"User {user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
        plaintext="",
        html="",
    )

    return coupon
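Note the concurrency design in redeem_coupon: instead of read-then-write, it issues one conditional UPDATE and checks rowcount, so two concurrent redemptions of the same code cannot both succeed. A self-contained sketch of the same compare-and-set pattern using stdlib sqlite3:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE coupon (code TEXT PRIMARY KEY, used INTEGER DEFAULT 0)")
con.execute("INSERT INTO coupon VALUES ('PROMO', 0)")

def redeem(code: str) -> bool:
    # Single conditional UPDATE: only one caller can flip used 0 -> 1.
    cur = con.execute("UPDATE coupon SET used = 1 WHERE code = ? AND used = 0", (code,))
    return cur.rowcount == 1

assert redeem("PROMO") is True   # first redemption wins
assert redeem("PROMO") is False  # second attempt sees rowcount == 0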
206
app/app/custom_domain_utils.py
Normal file
@ -0,0 +1,206 @@
import arrow
import re

from dataclasses import dataclass
from enum import Enum
from typing import List, Optional

from app.config import JOB_DELETE_DOMAIN
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
_MAX_MAILBOXES_PER_DOMAIN = 20


@dataclass
class CreateCustomDomainResult:
    message: str = ""
    message_category: str = ""
    success: bool = False
    instance: Optional[CustomDomain] = None
    redirect: Optional[str] = None


class CannotUseDomainReason(Enum):
    InvalidDomain = 1
    BuiltinDomain = 2
    DomainAlreadyUsed = 3
    DomainPartOfUserEmail = 4
    DomainUserInMailbox = 5

    def message(self, domain: str) -> str:
        if self == CannotUseDomainReason.InvalidDomain:
            return "This is not a valid domain"
        elif self == CannotUseDomainReason.BuiltinDomain:
            return "A custom domain cannot be a built-in domain."
        elif self == CannotUseDomainReason.DomainAlreadyUsed:
            return f"{domain} already used"
        elif self == CannotUseDomainReason.DomainPartOfUserEmail:
            return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
        elif self == CannotUseDomainReason.DomainUserInMailbox:
            return f"{domain} already used in a SimpleLogin mailbox"
        else:
            raise Exception("Invalid CannotUseDomainReason")


class CannotSetCustomDomainMailboxesCause(Enum):
    InvalidMailbox = "Something went wrong, please retry"
    NoMailboxes = "You must select at least 1 mailbox"
    TooManyMailboxes = (
        f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
    )


@dataclass
class SetCustomDomainMailboxesResult:
    success: bool
    reason: Optional[CannotSetCustomDomainMailboxesCause] = None


def is_valid_domain(domain: str) -> bool:
    """
    Checks that a domain is valid according to RFC 1035
    """
    if len(domain) > 255:
        return False
    if domain.endswith("."):
        domain = domain[:-1]  # Strip the trailing dot
    labels = domain.split(".")
    if not labels:
        return False
    for label in labels:
        if not _ALLOWED_DOMAIN_REGEX.match(label):
            return False
    return True


def sanitize_domain(domain: str) -> str:
    new_domain = domain.lower().strip()
    if new_domain.startswith("http://"):
        new_domain = new_domain[len("http://") :]

    if new_domain.startswith("https://"):
        new_domain = new_domain[len("https://") :]

    return new_domain


def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
    if not is_valid_domain(domain):
        return CannotUseDomainReason.InvalidDomain
    elif SLDomain.get_by(domain=domain):
        return CannotUseDomainReason.BuiltinDomain
    elif CustomDomain.get_by(domain=domain):
        return CannotUseDomainReason.DomainAlreadyUsed
    elif get_email_domain_part(user.email) == domain:
        return CannotUseDomainReason.DomainPartOfUserEmail
    elif Mailbox.filter(
        Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
    ).first():
        return CannotUseDomainReason.DomainUserInMailbox
    else:
        return None


def create_custom_domain(
    user: User, domain: str, partner_id: Optional[int] = None
) -> CreateCustomDomainResult:
    if not user.is_premium():
        return CreateCustomDomainResult(
            message="Only premium plan can add custom domain",
            message_category="warning",
        )

    new_domain = sanitize_domain(domain)
    domain_forbidden_cause = can_domain_be_used(user, new_domain)
    if domain_forbidden_cause:
        return CreateCustomDomainResult(
            message=domain_forbidden_cause.message(new_domain), message_category="error"
        )

    new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)

    # new domain has ownership verified if its parent has the ownership verified
    for root_cd in user.custom_domains:
        if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
            LOG.i(
                "%s ownership verified thanks to %s",
                new_custom_domain,
                root_cd,
            )
            new_custom_domain.ownership_verified = True

    # Add the partner_id in case it's passed
    if partner_id is not None:
        new_custom_domain.partner_id = partner_id

    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.CreateCustomDomain,
        message=f"Created custom domain {new_custom_domain.id} ({new_domain})",
    )
    Session.commit()

    return CreateCustomDomainResult(
        success=True,
        instance=new_custom_domain,
    )


def delete_custom_domain(domain: CustomDomain):
    # Schedule delete domain job
    LOG.w("schedule delete domain job for %s", domain)
    domain.pending_deletion = True
    Job.create(
        name=JOB_DELETE_DOMAIN,
        payload={"custom_domain_id": domain.id},
        run_at=arrow.now(),
        commit=True,
    )


def set_custom_domain_mailboxes(
    user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
) -> SetCustomDomainMailboxesResult:
    if len(mailbox_ids) == 0:
        return SetCustomDomainMailboxesResult(
            success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
        )
    elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
        return SetCustomDomainMailboxesResult(
            success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
        )

    mailboxes = (
        Session.query(Mailbox)
        .filter(
            Mailbox.id.in_(mailbox_ids),
            Mailbox.user_id == user_id,
            Mailbox.verified == True,  # noqa: E712
        )
        .all()
    )
    if len(mailboxes) != len(mailbox_ids):
        return SetCustomDomainMailboxesResult(
            success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
        )

    # first remove all existing domain-mailboxes links
    DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
    Session.flush()

    for mailbox in mailboxes:
        DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)

    mailboxes_as_str = ",".join(map(str, mailbox_ids))
    emit_user_audit_log(
        user=custom_domain.user,
        action=UserAuditLogAction.UpdateCustomDomain,
        message=f"Updated custom domain {custom_domain.id} mailboxes (domain={custom_domain.domain}) (mailboxes={mailboxes_as_str})",
    )
    Session.commit()
    return SetCustomDomainMailboxesResult(success=True)
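is_valid_domain and sanitize_domain above are pure functions and easy to exercise. Expected behaviour under the RFC 1035 label rules encoded in _ALLOWED_DOMAIN_REGEX:

from app.custom_domain_utils import is_valid_domain, sanitize_domain

assert is_valid_domain("example.com")
assert is_valid_domain("mail.example.com.")      # trailing dot is stripped
assert not is_valid_domain("-bad-.example.com")  # labels cannot start or end with "-"
assert not is_valid_domain("a" * 64 + ".com")    # a label is limited to 63 chars
assert sanitize_domain("HTTPS://Example.COM ") == "example.com"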
@ -1,37 +1,228 @@
from dataclasses import dataclass
from typing import List, Optional

from app import config
from app.constants import DMARC_RECORD
from app.db import Session
from app.dns_utils import get_cname_record
from app.dns_utils import (
    MxRecord,
    DNSClient,
    is_mx_equivalent,
    get_network_dns_client,
)
from app.models import CustomDomain
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string


@dataclass
class DomainValidationResult:
    success: bool
    errors: [str]


class CustomDomainValidation:
    def __init__(self, dkim_domain: str):
    def __init__(
        self,
        dkim_domain: str,
        dns_client: DNSClient = get_network_dns_client(),
        partner_domains: Optional[dict[int, str]] = None,
        partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
    ):
        self.dkim_domain = dkim_domain
        self._dkim_records = {
            (f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
        self._dns_client = dns_client
        self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
        self._partner_domain_validation_prefixes = (
            partner_domains_validation_prefixes
            or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
        )

    def get_ownership_verification_record(self, domain: CustomDomain) -> str:
        prefix = "sl"
        if (
            domain.partner_id is not None
            and domain.partner_id in self._partner_domain_validation_prefixes
        ):
            prefix = self._partner_domain_validation_prefixes[domain.partner_id]

        if not domain.ownership_txt_token:
            domain.ownership_txt_token = random_string(30)
            Session.commit()

        return f"{prefix}-verification={domain.ownership_txt_token}"

    def get_expected_mx_records(self, domain: CustomDomain) -> list[MxRecord]:
        records = []
        if domain.partner_id is not None and domain.partner_id in self._partner_domains:
            domain = self._partner_domains[domain.partner_id]
            records.append(MxRecord(10, f"mx1.{domain}."))
            records.append(MxRecord(20, f"mx2.{domain}."))
        else:
            # Default ones
            for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
                records.append(MxRecord(priority, domain))

        return records

    def get_expected_spf_domain(self, domain: CustomDomain) -> str:
        if domain.partner_id is not None and domain.partner_id in self._partner_domains:
            return self._partner_domains[domain.partner_id]
        else:
            return config.EMAIL_DOMAIN

    def get_expected_spf_record(self, domain: CustomDomain) -> str:
        spf_domain = self.get_expected_spf_domain(domain)
        return f"v=spf1 include:{spf_domain} ~all"

    def get_dkim_records(self, domain: CustomDomain) -> {str: str}:
        """
        Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
        it will return the default ones or the partner ones.
        """

        # By default use the default domain
        dkim_domain = self.dkim_domain
        if domain.partner_id is not None:
            # Domain is from a partner. Retrieve the partner config and use that domain if exists
            dkim_domain = self._partner_domains.get(domain.partner_id, dkim_domain)

        return {
            f"{key}._domainkey": f"{key}._domainkey.{dkim_domain}"
            for key in ("dkim", "dkim02", "dkim03")
        }

    def get_dkim_records(self) -> {str: str}:
        """
        Get a list of dkim records to set up. It will be

        """
        return self._dkim_records

    def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
        """
        Check if dkim records are properly set for this custom domain.
        Returns empty list if all records are ok. Other-wise return the records that aren't properly configured
        """
        correct_records = {}
        invalid_records = {}
        for prefix, expected_record in self.get_dkim_records():
        expected_records = self.get_dkim_records(custom_domain)
        for prefix, expected_record in expected_records.items():
            custom_record = f"{prefix}.{custom_domain.domain}"
            dkim_record = get_cname_record(custom_record)
            if dkim_record != expected_record:
            dkim_record = self._dns_client.get_cname_record(custom_record)
            if dkim_record == expected_record:
                correct_records[prefix] = custom_record
            else:
                invalid_records[custom_record] = dkim_record or "empty"
        # HACK: If dkim is enabled, don't disable it to give users time to update their CNAMES

        # HACK
        # As initially we only had one dkim record, we want to allow users that had only the original dkim record and
        # the domain validated to continue seeing it as validated (although showing them the missing records).
        # However, if not even the original dkim record is right, even if the domain was dkim_verified in the past,
        # we will remove the dkim_verified flag.
        # This is done in order to give users with the old dkim config (only one) to update their CNAMEs
        if custom_domain.dkim_verified:
            return invalid_records
            # Check if at least the original dkim is there
            if correct_records.get("dkim._domainkey") is not None:
                # Original dkim record is there. Return the missing records (if any) and don't clear the flag
                return invalid_records

            # Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
            # rest of the code path, returning the invalid records and clearing the flag
        custom_domain.dkim_verified = len(invalid_records) == 0
        if custom_domain.dkim_verified:
            emit_user_audit_log(
                user=custom_domain.user,
                action=UserAuditLogAction.VerifyCustomDomain,
                message=f"Verified DKIM records for custom domain {custom_domain.id} ({custom_domain.domain})",
            )
        Session.commit()
        return invalid_records

    def validate_domain_ownership(
        self, custom_domain: CustomDomain
    ) -> DomainValidationResult:
        """
        Check if the custom_domain has added the ownership verification records
        """
        txt_records = self._dns_client.get_txt_record(custom_domain.domain)
        expected_verification_record = self.get_ownership_verification_record(
            custom_domain
        )

        if expected_verification_record in txt_records:
            custom_domain.ownership_verified = True
            emit_user_audit_log(
                user=custom_domain.user,
                action=UserAuditLogAction.VerifyCustomDomain,
                message=f"Verified ownership for custom domain {custom_domain.id} ({custom_domain.domain})",
            )
            Session.commit()
            return DomainValidationResult(success=True, errors=[])
        else:
            return DomainValidationResult(success=False, errors=txt_records)

    def validate_mx_records(
        self, custom_domain: CustomDomain
    ) -> DomainValidationResult:
        mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
        expected_mx_records = self.get_expected_mx_records(custom_domain)

        if not is_mx_equivalent(mx_domains, expected_mx_records):
            return DomainValidationResult(
                success=False,
                errors=[f"{record.priority} {record.domain}" for record in mx_domains],
            )
        else:
            custom_domain.verified = True
            emit_user_audit_log(
                user=custom_domain.user,
                action=UserAuditLogAction.VerifyCustomDomain,
                message=f"Verified MX records for custom domain {custom_domain.id} ({custom_domain.domain})",
            )
            Session.commit()
            return DomainValidationResult(success=True, errors=[])

    def validate_spf_records(
        self, custom_domain: CustomDomain
    ) -> DomainValidationResult:
        spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
        expected_spf_domain = self.get_expected_spf_domain(custom_domain)
        if expected_spf_domain in spf_domains:
            custom_domain.spf_verified = True
            emit_user_audit_log(
                user=custom_domain.user,
                action=UserAuditLogAction.VerifyCustomDomain,
                message=f"Verified SPF records for custom domain {custom_domain.id} ({custom_domain.domain})",
            )
            Session.commit()
            return DomainValidationResult(success=True, errors=[])
        else:
            custom_domain.spf_verified = False
            Session.commit()
            txt_records = self._dns_client.get_txt_record(custom_domain.domain)
            cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
            return DomainValidationResult(
                success=False,
                errors=cleaned_records,
            )

    def validate_dmarc_records(
        self, custom_domain: CustomDomain
    ) -> DomainValidationResult:
        txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
        if DMARC_RECORD in txt_records:
            custom_domain.dmarc_verified = True
            emit_user_audit_log(
                user=custom_domain.user,
                action=UserAuditLogAction.VerifyCustomDomain,
                message=f"Verified DMARC records for custom domain {custom_domain.id} ({custom_domain.domain})",
            )
            Session.commit()
            return DomainValidationResult(success=True, errors=[])
        else:
            custom_domain.dmarc_verified = False
            Session.commit()
            return DomainValidationResult(success=False, errors=txt_records)

    def __clean_spf_records(
        self, txt_records: List[str], custom_domain: CustomDomain
    ) -> List[str]:
        final_records = []
        verification_record = self.get_ownership_verification_record(custom_domain)
        for record in txt_records:
            if record != verification_record:
                final_records.append(record)
        return final_records
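The CustomDomainValidation refactor injects a DNSClient instead of calling module-level DNS helpers, which makes the validators testable without touching real DNS. A sketch of a test double, assuming DNSClient is the duck-typed interface implied by the calls above (get_txt_record, get_cname_record, get_mx_domains, get_spf_domain):

class StubDNSClient:
    """Test double that answers from canned data instead of the network."""

    def __init__(self, txt=None, cname=None, mx=None, spf=None):
        self._txt = txt or {}
        self._cname = cname or {}
        self._mx = mx or {}
        self._spf = spf or {}

    def get_txt_record(self, hostname):
        return self._txt.get(hostname, [])

    def get_cname_record(self, hostname):
        return self._cname.get(hostname)

    def get_mx_domains(self, hostname):
        return self._mx.get(hostname, [])

    def get_spf_domain(self, hostname):
        return self._spf.get(hostname, [])

# Hypothetical usage in a test:
# validator = CustomDomainValidation(
#     "simplelogin.co",
#     dns_client=StubDNSClient(txt={"example.com": ["sl-verification=abc123"]}),
# )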
@ -1,3 +1,5 @@
import secrets

import arrow
from flask import (
    render_template,

@ -163,7 +165,7 @@ def send_reset_password_email(user):
    """
    # the activation code is valid for 1h
    reset_password_code = ResetPasswordCode.create(
        user_id=user.id, code=random_string(60)
        user_id=user.id, code=secrets.token_urlsafe(32)
    )
    Session.commit()
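Swapping random_string(60) for secrets.token_urlsafe(32) moves reset codes onto the stdlib CSPRNG, which is the documented choice for security tokens. For scale:

import secrets

token = secrets.token_urlsafe(32)  # 32 random bytes -> 43 URL-safe chars, 256 bits of entropy
print(len(token), token)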
@ -237,6 +239,8 @@ def unlink_proton_account():
        flash("Invalid request", "warning")
        return redirect(url_for("dashboard.setting"))

    perform_proton_account_unlink(current_user)
    flash("Your Proton account has been unlinked", "success")
    if not perform_proton_account_unlink(current_user):
        flash("Account cannot be unlinked", "warning")
    else:
        flash("Your Proton account has been unlinked", "success")
    return redirect(url_for("dashboard.setting"))
@ -1,5 +1,6 @@
from dataclasses import dataclass
from operator import or_
from typing import Optional

from flask import render_template, request, redirect, flash
from flask import url_for

@ -9,13 +10,11 @@ from sqlalchemy import and_, func, case
from wtforms import StringField, validators, ValidationError

# Need to import directly from config to allow modification from the tests
from app import config, parallel_limiter
from app import config, parallel_limiter, contact_utils
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.contact_utils import ContactCreateError
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
    generate_reply_email,
    parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
    CannotCreateContactForReverseAlias,

@ -24,8 +23,8 @@ from app.errors import (
    ErrContactAlreadyExists,
)
from app.log import LOG
from app.models import Alias, Contact, EmailLog, User
from app.utils import sanitize_email, CSRFValidationForm
from app.models import Alias, Contact, EmailLog
from app.utils import CSRFValidationForm


def email_validator():

@ -51,7 +50,7 @@ def email_validator():
    return _check


def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
def create_contact(alias: Alias, contact_address: str) -> Contact:
    """
    Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
    Can throw exceptions:

@ -61,37 +60,23 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
    """
    if not contact_address:
        raise ErrAddressInvalid("Empty address")
    try:
        contact_name, contact_email = parse_full_address(contact_address)
    except ValueError:
    output = contact_utils.create_contact(email=contact_address, alias=alias)
    if output.error == ContactCreateError.InvalidEmail:
        raise ErrAddressInvalid(contact_address)

    contact_email = sanitize_email(contact_email)
    if not is_valid_email(contact_email):
        raise ErrAddressInvalid(contact_email)

    contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
    if contact:
        raise ErrContactAlreadyExists(contact)

    if not user.can_create_contacts():
    elif output.error == ContactCreateError.NotAllowed:
        raise ErrContactErrorUpgradeNeeded()
    elif output.error is not None:
        raise ErrAddressInvalid("Invalid address")
    elif not output.created:
        raise ErrContactAlreadyExists(output.contact)

    contact = Contact.create(
        user_id=alias.user_id,
        alias_id=alias.id,
        website_email=contact_email,
        name=contact_name,
        reply_email=generate_reply_email(contact_email, alias),
    )

    contact = output.contact
    LOG.d(
        "create reverse-alias for %s %s, reverse alias:%s",
        contact_address,
        alias,
        contact.reply_email,
    )
    Session.commit()

    return contact

@ -207,7 +192,7 @@ def get_contact_infos(


def delete_contact(alias: Alias, contact_id: int):
    contact = Contact.get(contact_id)
    contact: Optional[Contact] = Contact.get(contact_id)

    if not contact:
        flash("Unknown error. Refresh the page", "warning")

@ -215,6 +200,11 @@ def delete_contact(alias: Alias, contact_id: int):
        flash("You cannot delete reverse-alias", "warning")
    else:
        delete_contact_email = contact.website_email
        emit_alias_audit_log(
            alias=alias,
            action=AliasAuditLogAction.DeleteContact,
            message=f"Delete contact {contact_id} ({contact.email})",
        )
        Contact.delete(contact_id)
        Session.commit()

@ -237,7 +227,10 @@ def alias_contact_manager(alias_id):

    page = 0
    if request.args.get("page"):
        page = int(request.args.get("page"))
        try:
            page = int(request.args.get("page"))
        except ValueError:
            pass

    query = request.args.get("query") or ""

@ -261,7 +254,7 @@ def alias_contact_manager(alias_id):
        if new_contact_form.validate():
            contact_address = new_contact_form.email.data.strip()
            try:
                contact = create_contact(current_user, alias, contact_address)
                contact = create_contact(alias, contact_address)
            except (
                ErrContactErrorUpgradeNeeded,
                ErrAddressInvalid,
@ -7,6 +7,7 @@ from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user

from app import config
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_utils import transfer_alias
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required

@ -57,6 +58,12 @@ def alias_transfer_send_route(alias_id):
    transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
    alias.transfer_token = hmac_alias_transfer_token(transfer_token)
    alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)

    emit_alias_audit_log(
        alias,
        AliasAuditLogAction.InitiateTransferAlias,
        "Initiated alias transfer",
    )
    Session.commit()
    alias_transfer_url = (
        config.URL
@ -1,8 +1,11 @@
from typing import Optional

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Contact

@ -20,7 +23,7 @@ class PGPContactForm(FlaskForm):
@dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
@login_required
def contact_detail_route(contact_id):
    contact = Contact.get(contact_id)
    contact: Optional[Contact] = Contact.get(contact_id)
    if not contact or contact.user_id != current_user.id:
        flash("You cannot see this page", "warning")
        return redirect(url_for("dashboard.index"))

@ -50,6 +53,11 @@ def contact_detail_route(contact_id):
            except PGPException:
                flash("Cannot add the public key, please verify it", "error")
            else:
                emit_alias_audit_log(
                    alias=alias,
                    action=AliasAuditLogAction.UpdateContact,
                    message=f"Added PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
                )
                Session.commit()
                flash(
                    f"PGP public key for {contact.email} is saved successfully",

@ -62,6 +70,11 @@ def contact_detail_route(contact_id):
            )
        elif pgp_form.action.data == "remove":
            # Free user can decide to remove contact PGP key
            emit_alias_audit_log(
                alias=alias,
                action=AliasAuditLogAction.UpdateContact,
                message=f"Removed PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
            )
            contact.pgp_public_key = None
            contact.pgp_finger_print = None
            Session.commit()
@ -1,17 +1,15 @@
import arrow
from flask import render_template, flash, redirect, url_for, request
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import parallel_limiter
from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
from app.coupon_utils import redeem_coupon, CouponUserCannotRedeemError
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.log import LOG
from app.models import (
    ManualSubscription,
    Coupon,
    Subscription,
    AppleSubscription,
    CoinbaseSubscription,

@ -58,56 +56,23 @@ def coupon_route():

    if coupon_form.validate_on_submit():
        code = coupon_form.code.data

        coupon: Coupon = Coupon.get_by(code=code)
        if coupon and not coupon.used:
            if coupon.expires_date and coupon.expires_date < arrow.now():
                flash(
                    f"The coupon was expired on {coupon.expires_date.humanize()}",
                    "error",
                )
                return redirect(request.url)

            updated = (
                Session.query(Coupon)
                .filter_by(code=code, used=False)
                .update({"used_by_user_id": current_user.id, "used": True})
            )
            if updated != 1:
                flash("Coupon is not valid", "error")
                return redirect(request.url)

            manual_sub: ManualSubscription = ManualSubscription.get_by(
                user_id=current_user.id
            )
            if manual_sub:
                # renew existing subscription
                if manual_sub.end_at > arrow.now():
                    manual_sub.end_at = manual_sub.end_at.shift(years=coupon.nb_year)
                else:
                    manual_sub.end_at = arrow.now().shift(years=coupon.nb_year, days=1)
                Session.commit()
                flash(
                    f"Your current subscription is extended to {manual_sub.end_at.humanize()}",
                    "success",
                )
            else:
                ManualSubscription.create(
                    user_id=current_user.id,
                    end_at=arrow.now().shift(years=coupon.nb_year, days=1),
                    comment="using coupon code",
                    is_giveaway=coupon.is_giveaway,
                    commit=True,
                )
        try:
            coupon = redeem_coupon(code, current_user)
            if coupon:
                flash(
                    "Your account has been upgraded to Premium, thanks for your support!",
                    "success",
                )

                return redirect(url_for("dashboard.index"))

        else:
            flash(f"Code *{code}* expired or invalid", "warning")
            else:
                flash(
                    "This coupon cannot be redeemed. It's invalid or has expired",
                    "warning",
                )
        except CouponUserCannotRedeemError:
            flash(
                "You have an active subscription. Please remove it before redeeming a coupon",
                "warning",
            )

    return render_template(
        "dashboard/coupon.html",
@ -5,11 +5,9 @@ from wtforms import StringField, validators

from app import parallel_limiter
from app.config import EMAIL_SERVERS_WITH_PRIORITY
from app.custom_domain_utils import create_custom_domain
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
from app.models import CustomDomain


class NewCustomDomainForm(FlaskForm):

@ -23,13 +21,12 @@ class NewCustomDomainForm(FlaskForm):
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
def custom_domain():
    custom_domains = CustomDomain.filter_by(
        user_id=current_user.id, is_sl_subdomain=False
        user_id=current_user.id,
        is_sl_subdomain=False,
        pending_deletion=False,
    ).all()
    mailboxes = current_user.mailboxes()
    new_custom_domain_form = NewCustomDomainForm()

    errors = {}

    if request.method == "POST":
        if request.form.get("form-name") == "create":
            if not current_user.is_premium():

@ -37,87 +34,25 @@ def custom_domain():
                return redirect(url_for("dashboard.custom_domain"))

            if new_custom_domain_form.validate():
                new_domain = new_custom_domain_form.domain.data.lower().strip()

                if new_domain.startswith("http://"):
                    new_domain = new_domain[len("http://") :]

                if new_domain.startswith("https://"):
                    new_domain = new_domain[len("https://") :]

                if SLDomain.get_by(domain=new_domain):
                    flash("A custom domain cannot be a built-in domain.", "error")
                elif CustomDomain.get_by(domain=new_domain):
                    flash(f"{new_domain} already used", "error")
                elif get_email_domain_part(current_user.email) == new_domain:
                    flash(
                        "You cannot add a domain that you are currently using for your personal email. "
                        "Please change your personal email to your real email",
                        "error",
                    )
                elif Mailbox.filter(
                    Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
                ).first():
                    flash(
                        f"{new_domain} already used in a SimpleLogin mailbox", "error"
                    )
                else:
                    new_custom_domain = CustomDomain.create(
                        domain=new_domain, user_id=current_user.id
                    )
                    # new domain has ownership verified if its parent has the ownership verified
                    for root_cd in current_user.custom_domains:
                        if (
                            new_domain.endswith("." + root_cd.domain)
                            and root_cd.ownership_verified
                        ):
                            LOG.i(
                                "%s ownership verified thanks to %s",
                                new_custom_domain,
                                root_cd,
                            )
                            new_custom_domain.ownership_verified = True

                    Session.commit()

                    mailbox_ids = request.form.getlist("mailbox_ids")
                    if mailbox_ids:
                        # check if mailbox is not tempered with
                        mailboxes = []
                        for mailbox_id in mailbox_ids:
                            mailbox = Mailbox.get(mailbox_id)
                            if (
                                not mailbox
                                or mailbox.user_id != current_user.id
                                or not mailbox.verified
                            ):
                                flash("Something went wrong, please retry", "warning")
                                return redirect(url_for("dashboard.custom_domain"))
                            mailboxes.append(mailbox)

                        for mailbox in mailboxes:
                            DomainMailbox.create(
                                domain_id=new_custom_domain.id, mailbox_id=mailbox.id
                            )

                        Session.commit()

                    flash(
                        f"New domain {new_custom_domain.domain} is created", "success"
                    )

                res = create_custom_domain(
                    user=current_user, domain=new_custom_domain_form.domain.data
                )
                if res.success:
                    flash(f"New domain {res.instance.domain} is created", "success")
                    return redirect(
                        url_for(
                            "dashboard.domain_detail_dns",
                            custom_domain_id=new_custom_domain.id,
                            custom_domain_id=res.instance.id,
                        )
                    )
                else:
                    flash(res.message, res.message_category)
                    if res.redirect:
                        return redirect(url_for(res.redirect))

    return render_template(
        "dashboard/custom_domain.html",
        custom_domains=custom_domains,
        new_custom_domain_form=new_custom_domain_form,
        EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
        errors=errors,
        mailboxes=mailboxes,
    )
@ -8,6 +8,7 @@ from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.log import LOG
from app.models import Subscription, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class DeleteDirForm(FlaskForm):

@ -33,6 +34,11 @@ def delete_account():

    # Schedule delete account job
    LOG.w("schedule delete account job for %s", current_user)
    emit_user_audit_log(
        user=current_user,
        action=UserAuditLogAction.UserMarkedForDeletion,
        message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
    )
    Job.create(
        name=JOB_DELETE_ACCOUNT,
        payload={"user_id": current_user.id},
|
||||
from typing import Optional
|
||||
|
||||
from flask import render_template, request, redirect, url_for, flash
|
||||
from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
@ -20,6 +22,7 @@ from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.errors import DirectoryInTrashError
|
||||
from app.models import Directory, Mailbox, DirectoryMailbox
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
|
||||
class NewDirForm(FlaskForm):
|
||||
@ -69,7 +72,9 @@ def directory():
|
||||
if not delete_dir_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_obj = Directory.get(delete_dir_form.directory_id.data)
|
||||
dir_obj: Optional[Directory] = Directory.get(
|
||||
delete_dir_form.directory_id.data
|
||||
)
|
||||
|
||||
if not dir_obj:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
@ -79,6 +84,11 @@ def directory():
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
name = dir_obj.name
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.DeleteDirectory,
|
||||
message=f"Delete directory {dir_obj.id} ({dir_obj.name})",
|
||||
)
|
||||
Directory.delete(dir_obj.id)
|
||||
Session.commit()
|
||||
flash(f"Directory {name} has been deleted", "success")
|
||||
@ -90,7 +100,7 @@ def directory():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = toggle_dir_form.directory_id.data
|
||||
dir_obj = Directory.get(dir_id)
|
||||
dir_obj: Optional[Directory] = Directory.get(dir_id)
|
||||
|
||||
if not dir_obj or dir_obj.user_id != current_user.id:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
@ -103,6 +113,11 @@ def directory():
|
||||
dir_obj.disabled = True
|
||||
flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")
|
||||
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateDirectory,
|
||||
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) set disabled = {dir_obj.disabled}",
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
@ -112,7 +127,7 @@ def directory():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = update_dir_form.directory_id.data
|
||||
dir_obj = Directory.get(dir_id)
|
||||
dir_obj: Optional[Directory] = Directory.get(dir_id)
|
||||
|
||||
if not dir_obj or dir_obj.user_id != current_user.id:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
@ -143,6 +158,12 @@ def directory():
|
||||
for mailbox in mailboxes:
|
||||
DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)
|
||||
|
||||
mailboxes_as_str = ",".join(map(str, mailbox_ids))
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateDirectory,
|
||||
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) mailboxes ({mailboxes_as_str})",
|
||||
)
|
||||
Session.commit()
|
||||
flash(f"Directory {dir_obj.name} has been updated", "success")
|
||||
|
||||
@ -181,6 +202,11 @@ def directory():
|
||||
new_dir = Directory.create(
|
||||
name=new_dir_name, user_id=current_user.id
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.CreateDirectory,
|
||||
message=f"New directory {new_dir.name} ({new_dir.name})",
|
||||
)
|
||||
except DirectoryInTrashError:
|
||||
flash(
|
||||
f"{new_dir_name} has been used before and cannot be reused",
|
||||
|
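
Note: every mutation in this view now emits an audit entry before Session.commit(). Assumed shape of the helper, inferred from the call sites (the real implementation lives in app/user_audit_log_utils.py, which is not part of this excerpt):

    def emit_user_audit_log(
        user: User, action: UserAuditLogAction, message: str, commit: bool = False
    ):
        UserAuditLog.create(  # assumed model backing the audit trail
            user_id=user.id, action=action.value, message=message, commit=commit
        )
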
@@ -1,33 +1,26 @@
import re

import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators, IntegerField

from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
from app.constants import DMARC_RECORD
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
from app.custom_domain_validation import CustomDomainValidation
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.dns_utils import (
    get_mx_domains,
    get_spf_domain,
    get_txt_record,
    is_mx_equivalent,
)
from app.log import LOG
from app.models import (
    CustomDomain,
    Alias,
    DomainDeletedAlias,
    Mailbox,
    DomainMailbox,
    AutoCreateRule,
    AutoCreateRuleMailbox,
    Job,
)
from app.regex_utils import regex_match
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string, CSRFValidationForm


@@ -44,13 +37,9 @@ def domain_detail_dns(custom_domain_id):
        custom_domain.ownership_txt_token = random_string(30)
        Session.commit()

    spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"

    domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
    csrf_form = CSRFValidationForm()

    dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"

    mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
    mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []

@@ -59,15 +48,14 @@ def domain_detail_dns(custom_domain_id):
            flash("Invalid request", "warning")
            return redirect(request.url)
        if request.form.get("form-name") == "check-ownership":
            txt_records = get_txt_record(custom_domain.domain)

            if custom_domain.get_ownership_dns_txt_value() in txt_records:
            ownership_validation_result = domain_validator.validate_domain_ownership(
                custom_domain
            )
            if ownership_validation_result.success:
                flash(
                    "Domain ownership is verified. Please proceed to the other records setup",
                    "success",
                )
                custom_domain.ownership_verified = True
                Session.commit()
                return redirect(
                    url_for(
                        "dashboard.domain_detail_dns",
@@ -78,36 +66,28 @@ def domain_detail_dns(custom_domain_id):
            else:
                flash("We can't find the needed TXT record", "error")
                ownership_ok = False
                ownership_errors = txt_records
                ownership_errors = ownership_validation_result.errors

        elif request.form.get("form-name") == "check-mx":
            mx_domains = get_mx_domains(custom_domain.domain)

            if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
                flash("The MX record is not correctly set", "warning")

                mx_ok = False
                # build mx_errors to show to user
                mx_errors = [
                    f"{priority} {domain}" for (priority, domain) in mx_domains
                ]
            else:
            mx_validation_result = domain_validator.validate_mx_records(custom_domain)
            if mx_validation_result.success:
                flash(
                    "Your domain can start receiving emails. You can now use it to create alias",
                    "success",
                )
                custom_domain.verified = True
                Session.commit()
                return redirect(
                    url_for(
                        "dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
                    )
                )
            else:
                flash("The MX record is not correctly set", "warning")
                mx_ok = False
                mx_errors = mx_validation_result.errors

        elif request.form.get("form-name") == "check-spf":
            spf_domains = get_spf_domain(custom_domain.domain)
            if EMAIL_DOMAIN in spf_domains:
                custom_domain.spf_verified = True
                Session.commit()
            spf_validation_result = domain_validator.validate_spf_records(custom_domain)
            if spf_validation_result.success:
                flash("SPF is setup correctly", "success")
                return redirect(
                    url_for(
@@ -115,14 +95,12 @@ def domain_detail_dns(custom_domain_id):
                    )
                )
            else:
                custom_domain.spf_verified = False
                Session.commit()
                flash(
                    f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
                    "warning",
                )
                spf_ok = False
                spf_errors = get_txt_record(custom_domain.domain)
                spf_errors = spf_validation_result.errors

        elif request.form.get("form-name") == "check-dkim":
            dkim_errors = domain_validator.validate_dkim_records(custom_domain)
@@ -138,10 +116,10 @@ def domain_detail_dns(custom_domain_id):
                flash("DKIM: the CNAME record is not correctly set", "warning")

        elif request.form.get("form-name") == "check-dmarc":
            txt_records = get_txt_record("_dmarc." + custom_domain.domain)
            if dmarc_record in txt_records:
                custom_domain.dmarc_verified = True
                Session.commit()
            dmarc_validation_result = domain_validator.validate_dmarc_records(
                custom_domain
            )
            if dmarc_validation_result.success:
                flash("DMARC is setup correctly", "success")
                return redirect(
                    url_for(
@@ -149,19 +127,23 @@ def domain_detail_dns(custom_domain_id):
                    )
                )
            else:
                custom_domain.dmarc_verified = False
                Session.commit()
                flash(
                    "DMARC: The TXT record is not correctly set",
                    "warning",
                )
                dmarc_ok = False
                dmarc_errors = txt_records
                dmarc_errors = dmarc_validation_result.errors

    return render_template(
        "dashboard/domain_detail/dns.html",
        EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
        dkim_records=domain_validator.get_dkim_records(),
        ownership_record=domain_validator.get_ownership_verification_record(
            custom_domain
        ),
        expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
        dkim_records=domain_validator.get_dkim_records(custom_domain),
        spf_record=domain_validator.get_expected_spf_record(custom_domain),
        dmarc_record=DMARC_RECORD,
        **locals(),
    )

@@ -183,6 +165,11 @@ def domain_detail(custom_domain_id):
            return redirect(request.url)
        if request.form.get("form-name") == "switch-catch-all":
            custom_domain.catch_all = not custom_domain.catch_all
            emit_user_audit_log(
                user=current_user,
                action=UserAuditLogAction.UpdateCustomDomain,
                message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) catch all to {custom_domain.catch_all}",
            )
            Session.commit()

            if custom_domain.catch_all:
@@ -201,6 +188,11 @@ def domain_detail(custom_domain_id):
        elif request.form.get("form-name") == "set-name":
            if request.form.get("action") == "save":
                custom_domain.name = request.form.get("alias-name").replace("\n", "")
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateCustomDomain,
                    message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) name",
                )
                Session.commit()
                flash(
                    f"Default alias name for Domain {custom_domain.domain} has been set",
@@ -208,6 +200,11 @@ def domain_detail(custom_domain_id):
                )
            else:
                custom_domain.name = None
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateCustomDomain,
                    message=f"Cleared custom domain {custom_domain.id} ({custom_domain.domain}) name",
                )
                Session.commit()
                flash(
                    f"Default alias name for Domain {custom_domain.domain} has been removed",
@@ -221,6 +218,11 @@ def domain_detail(custom_domain_id):
            custom_domain.random_prefix_generation = (
                not custom_domain.random_prefix_generation
            )
            emit_user_audit_log(
                user=current_user,
                action=UserAuditLogAction.UpdateCustomDomain,
                message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) random prefix generation to {custom_domain.random_prefix_generation}",
            )
            Session.commit()

            if custom_domain.random_prefix_generation:
@@ -238,40 +240,16 @@ def domain_detail(custom_domain_id):
            )
        elif request.form.get("form-name") == "update":
            mailbox_ids = request.form.getlist("mailbox_ids")
            # check if mailbox is not tempered with
            mailboxes = []
            for mailbox_id in mailbox_ids:
                mailbox = Mailbox.get(mailbox_id)
                if (
                    not mailbox
                    or mailbox.user_id != current_user.id
                    or not mailbox.verified
                ):
                    flash("Something went wrong, please retry", "warning")
                    return redirect(
                        url_for(
                            "dashboard.domain_detail", custom_domain_id=custom_domain.id
                        )
                    )
                mailboxes.append(mailbox)
            result = set_custom_domain_mailboxes(
                user_id=current_user.id,
                custom_domain=custom_domain,
                mailbox_ids=mailbox_ids,
            )

            if not mailboxes:
                flash("You must select at least 1 mailbox", "warning")
                return redirect(
                    url_for(
                        "dashboard.domain_detail", custom_domain_id=custom_domain.id
                    )
                )

            # first remove all existing domain-mailboxes links
            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
            Session.flush()

            for mailbox in mailboxes:
                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)

            Session.commit()
            flash(f"{custom_domain.domain} mailboxes has been updated", "success")
            if result.success:
                flash(f"{custom_domain.domain} mailboxes has been updated", "success")
            else:
                flash(result.reason.value, "warning")

            return redirect(
                url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
@@ -279,16 +257,8 @@ def domain_detail(custom_domain_id):

        elif request.form.get("form-name") == "delete":
            name = custom_domain.domain
            LOG.d("Schedule deleting %s", custom_domain)

            # Schedule delete domain job
            LOG.w("schedule delete domain job for %s", custom_domain)
            Job.create(
                name=JOB_DELETE_DOMAIN,
                payload={"custom_domain_id": custom_domain.id},
                run_at=arrow.now(),
                commit=True,
            )
            delete_custom_domain(custom_domain)

            flash(
                f"{name} scheduled for deletion."
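
Note: the per-record DNS checks are replaced by CustomDomainValidation methods that return a result object. The contract below is inferred from the `.success` / `.errors` accesses above; it is a sketch, not the actual class:

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class DomainValidationResult:  # assumed shape, inferred from the call sites
        success: bool
        errors: List[str] = field(default_factory=list)

    validator = CustomDomainValidation(EMAIL_DOMAIN)
    result = validator.validate_mx_records(custom_domain)
    if not result.success:
        mx_errors = result.errors
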
@@ -71,7 +71,10 @@ def index():

    page = 0
    if request.args.get("page"):
        page = int(request.args.get("page"))
        try:
            page = int(request.args.get("page"))
        except ValueError:
            pass

    highlight_alias_id = None
    if request.args.get("highlight_alias_id"):
@@ -145,11 +148,13 @@ def index():
                LOG.i(f"User {current_user} requested deletion of alias {alias}")
                email = alias.email
                alias_utils.delete_alias(
                    alias, current_user, AliasDeleteReason.ManualAction
                    alias, current_user, AliasDeleteReason.ManualAction, commit=True
                )
                flash(f"Alias {email} has been deleted", "success")
            elif request.form.get("form-name") == "disable-alias":
                alias_utils.change_alias_status(alias, enabled=False)
                alias_utils.change_alias_status(
                    alias, enabled=False, message="Set enabled=False from dashboard"
                )
                Session.commit()
                flash(f"Alias {alias.email} has been disabled", "success")
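
Note: change_alias_status now takes keyword arguments, including an audit message. Assumed signature, inferred only from the call sites in this diff:

    def change_alias_status(
        alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
    ) -> None:
        alias.enabled = enabled
        # the message is presumably recorded in an alias audit log (not shown here)
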
@@ -3,11 +3,9 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app.config import ADMIN_EMAIL
from app import parallel_limiter
from app.coupon_utils import redeem_lifetime_coupon
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import send_email
from app.models import LifetimeCoupon


class CouponForm(FlaskForm):
@@ -16,6 +14,7 @@ class CouponForm(FlaskForm):

@dashboard_bp.route("/lifetime_licence", methods=["GET", "POST"])
@login_required
@parallel_limiter.lock()
def lifetime_licence():
    if current_user.lifetime:
        flash("You already have a lifetime licence", "warning")
@@ -32,28 +31,12 @@ def lifetime_licence():

    if coupon_form.validate_on_submit():
        code = coupon_form.code.data

        coupon: LifetimeCoupon = LifetimeCoupon.get_by(code=code)
        if coupon and coupon.nb_used > 0:
            coupon.nb_used -= 1
            current_user.lifetime = True
            current_user.lifetime_coupon_id = coupon.id
            if coupon.paid:
                current_user.paid_lifetime = True
            Session.commit()

            # notify admin
            send_email(
                ADMIN_EMAIL,
                subject=f"User {current_user} used lifetime coupon({coupon.comment}). Coupon nb_used: {coupon.nb_used}",
                plaintext="",
                html="",
            )

        coupon = redeem_lifetime_coupon(code, current_user)
        if coupon:
            flash("You are upgraded to lifetime premium!", "success")
            return redirect(url_for("dashboard.index"))

        else:
            flash(f"Code *{code}* expired or invalid", "warning")
            flash("Coupon code expired or invalid", "warning")

    return render_template("dashboard/lifetime_licence.html", coupon_form=coupon_form)
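
Note: the coupon bookkeeping (nb_used, paid_lifetime, admin notification) moves into redeem_lifetime_coupon. A hedged sketch of its assumed contract; the real implementation is in app/coupon_utils.py, which is not part of this excerpt:

    def redeem_lifetime_coupon(code: str, user: User) -> Optional[LifetimeCoupon]:
        coupon = LifetimeCoupon.get_by(code=code)
        if not coupon or coupon.nb_used <= 0:
            return None  # invalid or exhausted code
        coupon.nb_used -= 1
        user.lifetime = True
        Session.commit()
        return coupon
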
@@ -1,6 +1,7 @@
import base64
import binascii
import json
from typing import Optional

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
@@ -15,6 +16,7 @@ from app.dashboard.base import dashboard_bp
from app.db import Session
from app.log import LOG
from app.models import Mailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import CSRFValidationForm


@@ -119,11 +121,22 @@ def mailbox_route():
@login_required
def mailbox_verify():
    mailbox_id = request.args.get("mailbox_id")
    if not mailbox_id:
        LOG.i("Missing mailbox_id")
        flash("You followed an invalid link", "error")
        return redirect(url_for("dashboard.mailbox_route"))

    code = request.args.get("code")
    if not code:
        # Old way
        return verify_with_signed_secret(mailbox_id)
    mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)

    try:
        mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
    except mailbox_utils.MailboxError as e:
        LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
        flash(f"Cannot verify mailbox: {e.msg}", "error")
        return redirect(url_for("dashboard.mailbox_route"))
    LOG.d("Mailbox %s is verified", mailbox)
    return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)

@@ -146,7 +159,7 @@ def verify_with_signed_secret(request: str):
        flash("Invalid link. Please delete and re-add your mailbox", "error")
        return redirect(url_for("dashboard.mailbox_route"))
    mailbox_id = mailbox_data[0]
    mailbox = Mailbox.get(mailbox_id)
    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
    if not mailbox:
        flash("Invalid link", "error")
        return redirect(url_for("dashboard.mailbox_route"))
@@ -156,6 +169,11 @@ def verify_with_signed_secret(request: str):
        return redirect(url_for("dashboard.mailbox_route"))

    mailbox.verified = True
    emit_user_audit_log(
        user=current_user,
        action=UserAuditLogAction.VerifyMailbox,
        message=f"Verified mailbox {mailbox.id} ({mailbox.email})",
    )
    Session.commit()

    LOG.d("Mailbox %s is verified", mailbox)
@@ -1,30 +1,31 @@
from smtplib import SMTPRecipientsRefused

from email_validator import validate_email, EmailNotValidError
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import TimestampSigner
from wtforms import validators
from wtforms.fields.html5 import EmailField
from wtforms.fields.simple import StringField

from app import mailbox_utils
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.mailbox_utils import (
    perform_mailbox_email_change,
    MailboxEmailChangeError,
    MailboxError,
)
from app.models import AuthorizedAddress
from app.models import Mailbox
from app.pgp_utils import PGPException, load_public_key_and_check
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email, CSRFValidationForm


class ChangeEmailForm(FlaskForm):
    email = EmailField(
    email = StringField(
        "email", validators=[validators.DataRequired(), validators.Email()]
    )

@@ -55,41 +56,30 @@ def mailbox_detail_route(mailbox_id):
            request.form.get("form-name") == "update-email"
            and change_email_form.validate_on_submit()
        ):
            new_email = sanitize_email(change_email_form.email.data)
            if new_email != mailbox.email and not pending_email:
                # check if this email is not already used
                if mailbox_already_used(new_email, current_user) or Alias.get_by(
                    email=new_email
                ):
                    flash(f"Email {new_email} already used", "error")
                elif not email_can_be_used_as_mailbox(new_email):
                    flash("You cannot use this email address as your mailbox", "error")
                else:
                    mailbox.new_email = new_email
                    Session.commit()

                    try:
                        verify_mailbox_change(current_user, mailbox, new_email)
                    except SMTPRecipientsRefused:
                        flash(
                            f"Incorrect mailbox, please recheck {mailbox.email}",
                            "error",
                        )
                    else:
                        flash(
                            f"You are going to receive an email to confirm {new_email}.",
                            "success",
                        )
                    return redirect(
                        url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
                    )
            try:
                response = mailbox_utils.request_mailbox_email_change(
                    current_user, mailbox, change_email_form.email.data
                )
                flash(
                    f"You are going to receive an email to confirm {mailbox.email}.",
                    "success",
                )
            except mailbox_utils.MailboxError as e:
                flash(e.msg, "error")
            return redirect(
                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
            )
        elif request.form.get("form-name") == "force-spf":
            if not ENFORCE_SPF:
                flash("SPF enforcement globally not enabled", "error")
                return redirect(url_for("dashboard.index"))

            mailbox.force_spf = (
                True if request.form.get("spf-status") == "on" else False
            force_spf_value = request.form.get("spf-status") == "on"
            mailbox.force_spf = force_spf_value
            emit_user_audit_log(
                user=current_user,
                action=UserAuditLogAction.UpdateMailbox,
                message=f"Set force_spf to {force_spf_value} on mailbox {mailbox_id} ({mailbox.email})",
            )
            Session.commit()
            flash(
@@ -113,6 +103,11 @@ def mailbox_detail_route(mailbox_id):
            if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
                flash(f"{address} already added", "error")
            else:
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Add authorized address {address} to mailbox {mailbox_id} ({mailbox.email})",
                )
                AuthorizedAddress.create(
                    user_id=current_user.id,
                    mailbox_id=mailbox.id,
@@ -133,6 +128,11 @@ def mailbox_detail_route(mailbox_id):
                flash("Unknown error. Refresh the page", "warning")
            else:
                address = authorized_address.email
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Remove authorized address {address} from mailbox {mailbox_id} ({mailbox.email})",
                )
                AuthorizedAddress.delete(authorized_address_id)
                Session.commit()
                flash(f"{address} has been deleted", "success")
@@ -165,6 +165,11 @@ def mailbox_detail_route(mailbox_id):
                except PGPException:
                    flash("Cannot add the public key, please verify it", "error")
                else:
                    emit_user_audit_log(
                        user=current_user,
                        action=UserAuditLogAction.UpdateMailbox,
                        message=f"Add PGP Key {mailbox.pgp_finger_print} to mailbox {mailbox_id} ({mailbox.email})",
                    )
                    Session.commit()
                    flash("Your PGP public key is saved successfully", "success")
                    return redirect(
@@ -172,6 +177,11 @@ def mailbox_detail_route(mailbox_id):
                    )
            elif request.form.get("action") == "remove":
                # Free user can decide to remove their added PGP key
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Remove PGP Key {mailbox.pgp_finger_print} from mailbox {mailbox_id} ({mailbox.email})",
                )
                mailbox.pgp_public_key = None
                mailbox.pgp_finger_print = None
                mailbox.disable_pgp = False
@@ -191,9 +201,19 @@ def mailbox_detail_route(mailbox_id):
                )
            else:
                mailbox.disable_pgp = False
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Enabled PGP for mailbox {mailbox_id} ({mailbox.email})",
                )
                flash(f"PGP is enabled on {mailbox.email}", "info")
            else:
                mailbox.disable_pgp = True
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Disabled PGP for mailbox {mailbox_id} ({mailbox.email})",
                )
                flash(f"PGP is disabled on {mailbox.email}", "info")

            Session.commit()
@@ -203,6 +223,11 @@ def mailbox_detail_route(mailbox_id):
        elif request.form.get("form-name") == "generic-subject":
            if request.form.get("action") == "save":
                mailbox.generic_subject = request.form.get("generic-subject")
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Set generic subject for mailbox {mailbox_id} ({mailbox.email})",
                )
                Session.commit()
                flash("Generic subject is enabled", "success")
                return redirect(
@@ -210,6 +235,11 @@ def mailbox_detail_route(mailbox_id):
                )
            elif request.form.get("action") == "remove":
                mailbox.generic_subject = None
                emit_user_audit_log(
                    user=current_user,
                    action=UserAuditLogAction.UpdateMailbox,
                    message=f"Remove generic subject for mailbox {mailbox_id} ({mailbox.email})",
                )
                Session.commit()
                flash("Generic subject is disabled", "success")
                return redirect(
@@ -220,91 +250,57 @@ def mailbox_detail_route(mailbox_id):
    return render_template("dashboard/mailbox_detail.html", **locals())


def verify_mailbox_change(user, mailbox, new_email):
    s = TimestampSigner(MAILBOX_SECRET)
    mailbox_id_signed = s.sign(str(mailbox.id)).decode()
    verification_url = (
        f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
    )

    send_email(
        new_email,
        "Confirm mailbox change on SimpleLogin",
        render(
            "transactional/verify-mailbox-change.txt.jinja2",
            user=user,
            link=verification_url,
            mailbox_email=mailbox.email,
            mailbox_new_email=new_email,
        ),
        render(
            "transactional/verify-mailbox-change.html",
            user=user,
            link=verification_url,
            mailbox_email=mailbox.email,
            mailbox_new_email=new_email,
        ),
    )


@dashboard_bp.route(
    "/mailbox/<int:mailbox_id>/cancel_email_change", methods=["GET", "POST"]
)
@login_required
def cancel_mailbox_change_route(mailbox_id):
    mailbox = Mailbox.get(mailbox_id)
    if not mailbox or mailbox.user_id != current_user.id:
        flash("You cannot see this page", "warning")
        return redirect(url_for("dashboard.index"))

    if mailbox.new_email:
        mailbox.new_email = None
        Session.commit()
    try:
        mailbox_utils.cancel_email_change(mailbox_id, current_user)
        flash("Your mailbox change is cancelled", "success")
        return redirect(
            url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
        )
    else:
        flash("You have no pending mailbox change", "warning")
        return redirect(
            url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
        )
    except MailboxError as e:
        flash(e.msg, "warning")
        return redirect(url_for("dashboard.index"))


@dashboard_bp.route("/mailbox/confirm_change")
def mailbox_confirm_change_route():
    s = TimestampSigner(MAILBOX_SECRET)
    signed_mailbox_id = request.args.get("mailbox_id")
def mailbox_confirm_email_change_route():
    mailbox_id = request.args.get("mailbox_id")

    try:
        mailbox_id = int(s.unsign(signed_mailbox_id, max_age=900))
    except Exception:
        flash("Invalid link", "error")
        return redirect(url_for("dashboard.index"))
    else:
        mailbox = Mailbox.get(mailbox_id)

        # new_email can be None if user cancels change in the meantime
        if mailbox and mailbox.new_email:
            user = mailbox.user
            if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
                flash(f"{mailbox.new_email} is already used", "error")
                return redirect(
                    url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
                )

            mailbox.email = mailbox.new_email
            mailbox.new_email = None

            # mark mailbox as verified if the change request is sent from an unverified mailbox
            mailbox.verified = True
            Session.commit()

            LOG.d("Mailbox change %s is verified", mailbox)
            flash(f"The {mailbox.email} is updated", "success")
    code = request.args.get("code")
    if code:
        try:
            mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
            flash("Successfully changed mailbox email", "success")
            return redirect(
                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
            )
    else:
        except mailbox_utils.MailboxError as e:
            flash(f"Cannot verify mailbox: {e.msg}", "error")
            return redirect(url_for("dashboard.mailbox_route"))
    else:
        s = TimestampSigner(MAILBOX_SECRET)
        try:
            mailbox_id = int(s.unsign(mailbox_id, max_age=900))
            res = perform_mailbox_email_change(mailbox_id)
            flash(res.message, res.message_category)
            if res.error:
                if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
                    return redirect(
                        url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
                    )
                elif res.error == MailboxEmailChangeError.InvalidId:
                    return redirect(url_for("dashboard.index"))
                else:
                    raise Exception("Unhandled MailboxEmailChangeError")
        except Exception:
            flash("Invalid link", "error")
            return redirect(url_for("dashboard.index"))

    flash("Successfully changed mailbox email", "success")
    return redirect(url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id))
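
Note: the mailbox email-change flow is now driven by mailbox_utils (request_mailbox_email_change, verify_mailbox_code, cancel_email_change), with failures surfacing as MailboxError carrying a user-facing msg. Assumed error shape, inferred from the `e.msg` accesses above:

    class MailboxError(Exception):  # assumed definition, not shown in this diff
        def __init__(self, msg: str):
            super().__init__(msg)
            self.msg = msg
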
@@ -43,7 +43,10 @@ def notification_route(notification_id):
def notifications_route():
    page = 0
    if request.args.get("page"):
        page = int(request.args.get("page"))
        try:
            page = int(request.args.get("page"))
        except ValueError:
            pass

    notifications = (
        Notification.filter_by(user_id=current_user.id)
@@ -41,7 +41,7 @@ from app.models import (
    PartnerSubscription,
    UnsubscribeBehaviourEnum,
)
from app.proton.utils import get_proton_partner
from app.proton.utils import get_proton_partner, can_unlink_proton_account
from app.utils import (
    random_string,
    CSRFValidationForm,
@@ -174,7 +174,12 @@ def setting():
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))
        elif request.form.get("form-name") == "random-alias-suffix":
            scheme = int(request.form.get("random-alias-suffix-generator"))
            try:
                scheme = int(request.form.get("random-alias-suffix-generator"))
            except ValueError:
                flash("Invalid value", "error")
                return redirect(url_for("dashboard.setting"))

            if AliasSuffixEnum.has_value(scheme):
                current_user.random_alias_suffix = scheme
                Session.commit()
@@ -318,4 +323,5 @@ def setting():
        ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
        connect_with_proton=CONNECT_WITH_PROTON,
        proton_linked_account=proton_linked_account,
        can_unlink_proton_account=can_unlink_proton_account(current_user),
    )
@@ -11,6 +11,7 @@ from app.dashboard.base import dashboard_bp
from app.errors import SubdomainInTrashError
from app.log import LOG
from app.models import CustomDomain, Mailbox, SLDomain
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

# Only lowercase letters, numbers, dashes (-) are currently supported
_SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"
@@ -102,6 +103,12 @@ def subdomain_route():
                ownership_verified=True,
                commit=True,
            )
            emit_user_audit_log(
                user=current_user,
                action=UserAuditLogAction.CreateCustomDomain,
                message=f"Create subdomain {new_custom_domain.id} ({full_domain})",
                commit=True,
            )
        except SubdomainInTrashError:
            flash(
                f"{full_domain} has been used before and cannot be reused",
@@ -32,7 +32,9 @@ def unsubscribe(alias_id):

    # automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
    if request.method == "POST":
        alias_utils.change_alias_status(alias, False)
        alias_utils.change_alias_status(
            alias, enabled=False, message="Set enabled=False from unsubscribe request"
        )
        flash(f"Alias {alias.email} has been blocked", "success")
        Session.commit()
@@ -1,102 +1,22 @@
from app import config
from typing import Optional, List, Tuple
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import List, Optional

import dns.resolver


def _get_dns_resolver():
    my_resolver = dns.resolver.Resolver()
    my_resolver.nameservers = config.NAMESERVERS

    return my_resolver


def get_ns(hostname) -> [str]:
    try:
        answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
    except Exception:
        return []
    return [a.to_text() for a in answers]


def get_cname_record(hostname) -> Optional[str]:
    """Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
    try:
        answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
    except Exception:
        return None

    for a in answers:
        ret = a.to_text()
        return ret[:-1]

    return None


def get_mx_domains(hostname) -> [(int, str)]:
    """return list of (priority, domain name) sorted by priority (lowest priority first)
    domain name ends with a "." at the end.
    """
    try:
        answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
    except Exception:
        return []

    ret = []

    for a in answers:
        record = a.to_text()  # for ex '20 alt2.aspmx.l.google.com.'
        parts = record.split(" ")

        ret.append((int(parts[0]), parts[1]))

    return sorted(ret, key=lambda prio_domain: prio_domain[0])

from app.config import NAMESERVERS

_include_spf = "include:"


def get_spf_domain(hostname) -> [str]:
    """return all domains listed in *include:*"""
    try:
        answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
    except Exception:
        return []

    ret = []

    for a in answers:  # type: dns.rdtypes.ANY.TXT.TXT
        for record in a.strings:
            record = record.decode()  # record is bytes

            if record.startswith("v=spf1"):
                parts = record.split(" ")
                for part in parts:
                    if part.startswith(_include_spf):
                        ret.append(part[part.find(_include_spf) + len(_include_spf) :])

    return ret


def get_txt_record(hostname) -> [str]:
    try:
        answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
    except Exception:
        return []

    ret = []

    for a in answers:  # type: dns.rdtypes.ANY.TXT.TXT
        for record in a.strings:
            record = record.decode()  # record is bytes

            ret.append(record)

    return ret
@dataclass
class MxRecord:
    priority: int
    domain: str


def is_mx_equivalent(
    mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
    mx_domains: List[MxRecord], ref_mx_domains: List[MxRecord]
) -> bool:
    """
    Compare mx_domains with ref_mx_domains to see if they are equivalent.
@@ -105,16 +25,127 @@ def is_mx_equivalent(
    The priority order is taken into account but not the priority number.
    For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
    """
    mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
    ref_mx_domains = sorted(
        ref_mx_domains, key=lambda priority_domain: priority_domain[0]
    )
    mx_domains = sorted(mx_domains, key=lambda x: x.priority)
    ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x.priority)

    if len(mx_domains) < len(ref_mx_domains):
        return False

    for i in range(0, len(ref_mx_domains)):
        if mx_domains[i][1] != ref_mx_domains[i][1]:
    for actual, expected in zip(mx_domains, ref_mx_domains):
        if actual.domain != expected.domain:
            return False

    return True


class DNSClient(ABC):
    @abstractmethod
    def get_cname_record(self, hostname: str) -> Optional[str]:
        pass

    @abstractmethod
    def get_mx_domains(self, hostname: str) -> List[MxRecord]:
        pass

    def get_spf_domain(self, hostname: str) -> List[str]:
        """
        return all domains listed in *include:*
        """
        try:
            records = self.get_txt_record(hostname)
            ret = []
            for record in records:
                if record.startswith("v=spf1"):
                    parts = record.split(" ")
                    for part in parts:
                        if part.startswith(_include_spf):
                            ret.append(
                                part[part.find(_include_spf) + len(_include_spf) :]
                            )
            return ret
        except Exception:
            return []

    @abstractmethod
    def get_txt_record(self, hostname: str) -> List[str]:
        pass


class NetworkDNSClient(DNSClient):
    def __init__(self, nameservers: List[str]):
        self._resolver = dns.resolver.Resolver()
        self._resolver.nameservers = nameservers

    def get_cname_record(self, hostname: str) -> Optional[str]:
        """
        Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end
        """
        try:
            answers = self._resolver.resolve(hostname, "CNAME", search=True)
            for a in answers:
                ret = a.to_text()
                return ret[:-1]
        except Exception:
            return None

    def get_mx_domains(self, hostname: str) -> List[MxRecord]:
        """
        return list of (priority, domain name) sorted by priority (lowest priority first)
        domain name ends with a "." at the end.
        """
        try:
            answers = self._resolver.resolve(hostname, "MX", search=True)
            ret = []
            for a in answers:
                record = a.to_text()  # for ex '20 alt2.aspmx.l.google.com.'
                parts = record.split(" ")
                ret.append(MxRecord(priority=int(parts[0]), domain=parts[1]))
            return sorted(ret, key=lambda x: x.priority)
        except Exception:
            return []

    def get_txt_record(self, hostname: str) -> List[str]:
        try:
            answers = self._resolver.resolve(hostname, "TXT", search=False)
            ret = []
            for a in answers:  # type: dns.rdtypes.ANY.TXT.TXT
                for record in a.strings:
                    ret.append(record.decode())
            return ret
        except Exception:
            return []


class InMemoryDNSClient(DNSClient):
    def __init__(self):
        self.cname_records: dict[str, Optional[str]] = {}
        self.mx_records: dict[str, List[MxRecord]] = {}
        self.spf_records: dict[str, List[str]] = {}
        self.txt_records: dict[str, List[str]] = {}

    def set_cname_record(self, hostname: str, cname: str):
        self.cname_records[hostname] = cname

    def set_mx_records(self, hostname: str, mx_list: List[MxRecord]):
        self.mx_records[hostname] = mx_list

    def set_txt_record(self, hostname: str, txt_list: List[str]):
        self.txt_records[hostname] = txt_list

    def get_cname_record(self, hostname: str) -> Optional[str]:
        return self.cname_records.get(hostname)

    def get_mx_domains(self, hostname: str) -> List[MxRecord]:
        mx_list = self.mx_records.get(hostname, [])
        return sorted(mx_list, key=lambda x: x.priority)

    def get_txt_record(self, hostname: str) -> List[str]:
        return self.txt_records.get(hostname, [])


def get_network_dns_client() -> NetworkDNSClient:
    return NetworkDNSClient(NAMESERVERS)


def get_mx_domains(hostname: str) -> List[MxRecord]:
    return get_network_dns_client().get_mx_domains(hostname)
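
Note: the DNSClient abstraction makes the resolver injectable, so tests can use the in-memory fake instead of real DNS. Usage sketch built only from the classes above (the hostnames and record values are illustrative):

    from app.dns_utils import InMemoryDNSClient, MxRecord

    dns_client = InMemoryDNSClient()
    dns_client.set_mx_records(
        "example.com",
        [
            MxRecord(priority=20, domain="mx2.example.net."),
            MxRecord(priority=10, domain="mx1.example.net."),
        ],
    )
    # records come back sorted by priority, lowest first
    assert dns_client.get_mx_domains("example.com")[0].domain == "mx1.example.net."
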
@@ -592,7 +592,7 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:

    from app.models import CustomDomain

    if CustomDomain.get_by(domain=domain, verified=True):
    if CustomDomain.get_by(domain=domain, is_sl_subdomain=True, verified=True):
        LOG.d("domain %s is a SimpleLogin custom domain", domain)
        return False

@@ -657,7 +657,7 @@ def get_mx_domain_list(domain) -> [str]:
    """
    priority_domains = get_mx_domains(domain)

    return [d[:-1] for _, d in priority_domains]
    return [d.domain[:-1] for d in priority_domains]


def personal_email_already_used(email_address: str) -> bool:
@@ -1345,17 +1345,16 @@ def get_queue_id(msg: Message) -> Optional[str]:

    received_header = str(msg[headers.RECEIVED])
    if not received_header:
        return
        return None

    # received_header looks like 'from mail-wr1-x434.google.com (mail-wr1-x434.google.com [IPv6:2a00:1450:4864:20::434])\r\n\t(using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits))\r\n\t(No client certificate requested)\r\n\tby mx1.simplelogin.co (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n\tfor <jglfdjgld@alias.com>; Fri, 4 Jun 2021 14:55:43 +0000 (UTC)'
    search_result = re.search("with ESMTPS id [0-9a-zA-Z]{1,}", received_header)
    if not search_result:
        return

    # the "with ESMTPS id 4FxQmw1DXdz2vK2" part
    with_esmtps = received_header[search_result.start() : search_result.end()]

    return with_esmtps[len("with ESMTPS id ") :]
    search_result = re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", received_header)
    if search_result:
        return search_result.group(1)
    search_result = re.search(r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", received_header)
    if search_result:
        return search_result.group(1)
    return None


def should_ignore_bounce(mail_from: str) -> bool:
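
Note: get_queue_id now recognizes SMTP/ESMTP/ESMTPA/ESMTPS ids and falls back to the bare Postfix "id ...;" form. Illustrative inputs for the two branches (the header values below are made up):

    import re

    h1 = "by mx1.example.com (Postfix) with ESMTPS id 4FxQmw1DXdz2vK2\r\n\tfor <a@b>"
    assert re.search(r"with E?SMTP[AS]? id ([0-9a-zA-Z]{1,})", h1).group(1) == "4FxQmw1DXdz2vK2"

    h2 = "by mx1.example.com (Postfix)\r\n\tid 4FxQmw1DXdz2vK2;"
    assert re.search(r"\(Postfix\)\r\n\tid ([a-zA-Z0-9]{1,});", h2).group(1) == "4FxQmw1DXdz2vK2"
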
@@ -1,8 +1,12 @@
from abc import ABC, abstractmethod

import newrelic.agent

from app import config
from app.db import Session
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.log import LOG
from app.models import User, PartnerUser, SyncEvent
from app.proton.utils import get_proton_partner
from typing import Optional
@@ -26,26 +30,43 @@ class PostgresDispatcher(Dispatcher):
        return PostgresDispatcher()


class GlobalDispatcher:
    __dispatcher: Optional[Dispatcher] = None

    @staticmethod
    def get_dispatcher() -> Dispatcher:
        if not GlobalDispatcher.__dispatcher:
            GlobalDispatcher.__dispatcher = PostgresDispatcher.get()
        return GlobalDispatcher.__dispatcher

    @staticmethod
    def set_dispatcher(dispatcher: Optional[Dispatcher]):
        GlobalDispatcher.__dispatcher = dispatcher


class EventDispatcher:
    @staticmethod
    def send_event(
        user: User,
        content: event_pb2.EventContent,
        dispatcher: Dispatcher = PostgresDispatcher.get(),
        dispatcher: Optional[Dispatcher] = None,
        skip_if_webhook_missing: bool = True,
    ):
        if dispatcher is None:
            dispatcher = GlobalDispatcher.get_dispatcher()
        if config.EVENT_WEBHOOK_DISABLE:
            LOG.i("Not sending events because webhook is disabled")
            return

        if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
            LOG.i(
                "Not sending events because webhook is not configured and allowed to be empty"
            )
            return

        if config.EVENT_WEBHOOK_ENABLED_USER_IDS is not None:
            if user.id not in config.EVENT_WEBHOOK_ENABLED_USER_IDS:
                return

        partner_user = EventDispatcher.__partner_user(user.id)
        if not partner_user:
            LOG.i(f"Not sending events because there's no partner user for user {user}")
            return

        event = event_pb2.Event(
@@ -58,6 +79,10 @@ class EventDispatcher:
        serialized = event.SerializeToString()
        dispatcher.send(serialized)

        event_type = content.WhichOneof("content")
        newrelic.agent.record_custom_event("EventStoredToDb", {"type": event_type})
        LOG.i("Sent event to the dispatcher")

    @staticmethod
    def __partner_user(user_id: int) -> Optional[PartnerUser]:
        # Check if the current user has a partner_id
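
Note: the default dispatcher is now resolved lazily through GlobalDispatcher instead of being bound at import time, which lets tests inject a stub. Sketch; the InMemoryDispatcher below is hypothetical test code, not part of this diff:

    class InMemoryDispatcher(Dispatcher):
        def __init__(self):
            self.memory: list[bytes] = []

        def send(self, event: bytes):
            self.memory.append(event)

    stub = InMemoryDispatcher()
    GlobalDispatcher.set_dispatcher(stub)
    # ... code under test calls EventDispatcher.send_event(...) ...
    GlobalDispatcher.set_dispatcher(None)  # restore the lazy default
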
@@ -24,7 +24,7 @@ _sym_db = _symbol_database.Default()



DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\":\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\x12\x10\n\x08lifetime\x18\x02 \x01(\x08\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -32,19 +32,19 @@ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
    DESCRIPTOR._loaded_options = None
    _globals['_USERPLANCHANGED']._serialized_start=35
    _globals['_USERPLANCHANGED']._serialized_end=75
    _globals['_USERDELETED']._serialized_start=77
    _globals['_USERDELETED']._serialized_end=90
    _globals['_ALIASCREATED']._serialized_start=92
    _globals['_ALIASCREATED']._serialized_end=182
    _globals['_ALIASSTATUSCHANGED']._serialized_start=184
    _globals['_ALIASSTATUSCHANGED']._serialized_end=260
    _globals['_ALIASDELETED']._serialized_start=262
    _globals['_ALIASDELETED']._serialized_end=315
    _globals['_ALIASCREATEDLIST']._serialized_start=317
    _globals['_ALIASCREATEDLIST']._serialized_end=385
    _globals['_EVENTCONTENT']._serialized_start=388
    _globals['_EVENTCONTENT']._serialized_end=791
    _globals['_EVENT']._serialized_start=793
    _globals['_EVENT']._serialized_end=914
    _globals['_USERPLANCHANGED']._serialized_end=93
    _globals['_USERDELETED']._serialized_start=95
    _globals['_USERDELETED']._serialized_end=108
    _globals['_ALIASCREATED']._serialized_start=110
    _globals['_ALIASCREATED']._serialized_end=202
    _globals['_ALIASSTATUSCHANGED']._serialized_start=204
    _globals['_ALIASSTATUSCHANGED']._serialized_end=288
    _globals['_ALIASDELETED']._serialized_start=290
    _globals['_ALIASDELETED']._serialized_end=331
    _globals['_ALIASCREATEDLIST']._serialized_start=333
    _globals['_ALIASCREATEDLIST']._serialized_end=401
    _globals['_EVENTCONTENT']._serialized_start=404
    _globals['_EVENTCONTENT']._serialized_end=807
    _globals['_EVENT']._serialized_start=809
    _globals['_EVENT']._serialized_end=930
# @@protoc_insertion_point(module_scope)
@@ -6,44 +6,50 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
DESCRIPTOR: _descriptor.FileDescriptor

class UserPlanChanged(_message.Message):
    __slots__ = ("plan_end_time",)
    __slots__ = ("plan_end_time", "lifetime")
    PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
    LIFETIME_FIELD_NUMBER: _ClassVar[int]
    plan_end_time: int
    def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
    lifetime: bool
    def __init__(self, plan_end_time: _Optional[int] = ..., lifetime: bool = ...) -> None: ...

class UserDeleted(_message.Message):
    __slots__ = ()
    def __init__(self) -> None: ...

class AliasCreated(_message.Message):
    __slots__ = ("alias_id", "alias_email", "alias_note", "enabled")
    ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
    ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
    ALIAS_NOTE_FIELD_NUMBER: _ClassVar[int]
    __slots__ = ("id", "email", "note", "enabled", "created_at")
    ID_FIELD_NUMBER: _ClassVar[int]
    EMAIL_FIELD_NUMBER: _ClassVar[int]
    NOTE_FIELD_NUMBER: _ClassVar[int]
    ENABLED_FIELD_NUMBER: _ClassVar[int]
    alias_id: int
    alias_email: str
    alias_note: str
    CREATED_AT_FIELD_NUMBER: _ClassVar[int]
    id: int
    email: str
    note: str
    enabled: bool
    def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...
    created_at: int
    def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., note: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...

class AliasStatusChanged(_message.Message):
    __slots__ = ("alias_id", "alias_email", "enabled")
    ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
    ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
    __slots__ = ("id", "email", "enabled", "created_at")
    ID_FIELD_NUMBER: _ClassVar[int]
    EMAIL_FIELD_NUMBER: _ClassVar[int]
    ENABLED_FIELD_NUMBER: _ClassVar[int]
    alias_id: int
    alias_email: str
    CREATED_AT_FIELD_NUMBER: _ClassVar[int]
    id: int
    email: str
    enabled: bool
    def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., enabled: bool = ...) -> None: ...
    created_at: int
    def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...

class AliasDeleted(_message.Message):
    __slots__ = ("alias_id", "alias_email")
    ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
    ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
    alias_id: int
    alias_email: str
    def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...
    __slots__ = ("id", "email")
    ID_FIELD_NUMBER: _ClassVar[int]
    EMAIL_FIELD_NUMBER: _ClassVar[int]
    id: int
    email: str
    def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ...) -> None: ...

class AliasCreatedList(_message.Message):
    __slots__ = ("events",)
@@ -33,8 +33,11 @@ from app.models import (
    SLDomain,
    Hibp,
    AliasHibp,
    PartnerUser,
    PartnerSubscription,
)
from app.pgp_utils import load_public_key
from app.proton.utils import get_proton_partner


def fake_data():
@@ -269,3 +272,27 @@ def fake_data():
    CustomDomain.create(
        user_id=user.id, domain="old.com", verified=True, ownership_verified=True
    )

    # Create a user
    proton_partner = get_proton_partner()
    user = User.create(
        email="test@proton.me",
        name="Proton test",
        password="password",
        activated=True,
        is_admin=False,
        intro_shown=True,
        from_partner=True,
        flush=True,
    )
    pu = PartnerUser.create(
        user_id=user.id,
        partner_id=proton_partner.id,
        partner_email="test@proton.me",
        external_user_id="DUMMY",
        flush=True,
    )
    PartnerSubscription.create(
        partner_user_id=pu.id, end_at=arrow.now().shift(years=1, days=1)
    )
    Session.commit()
@@ -103,7 +103,9 @@ class UnsubscribeHandler:
        ):
            return status.E509
        LOG.i(f"User disabled alias {alias} via unsubscribe header")
        alias_utils.change_alias_status(alias, enabled=False)
        alias_utils.change_alias_status(
            alias, enabled=False, message="Set enabled=False via unsubscribe header"
        )
        Session.commit()
        enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
        for mailbox in alias.mailboxes:
@@ -1,3 +1,5 @@
import newrelic.agent

from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
from app.log import LOG
@@ -12,6 +14,7 @@ def send_alias_creation_events_for_user(
        return
    chunk_size = min(chunk_size, 50)
    event_list = []
    LOG.i(f"Sending alias create events for user {user}")
    for alias in (
        Alias.yield_per_query(chunk_size)
        .filter_by(user_id=user.id)
@@ -19,22 +22,31 @@ def send_alias_creation_events_for_user(
    ):
        event_list.append(
            AliasCreated(
                alias_id=alias.id,
                alias_email=alias.email,
                alias_note=alias.note,
                id=alias.id,
                email=alias.email,
                note=alias.note,
                enabled=alias.enabled,
                created_at=int(alias.created_at.timestamp),
            )
        )
        if len(event_list) >= chunk_size:
            LOG.i(f"Sending {len(event_list)} alias create event for {user}")
            EventDispatcher.send_event(
                user,
                EventContent(alias_create_list=AliasCreatedList(events=event_list)),
                dispatcher=dispatcher,
            )
            newrelic.agent.record_custom_metric(
                "Custom/event_alias_created_event", len(event_list)
            )
            event_list = []
    if len(event_list) > 0:
        LOG.i(f"Sending {len(event_list)} alias create event for {user}")
        EventDispatcher.send_event(
            user,
            EventContent(alias_create_list=AliasCreatedList(events=event_list)),
            dispatcher=dispatcher,
        )
        newrelic.agent.record_custom_metric(
            "Custom/event_alias_created_event", len(event_list)
        )
70
app/app/jobs/send_event_job.py
Normal file
@@ -0,0 +1,70 @@
from __future__ import annotations

import base64
from typing import Optional

import arrow

from app import config
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.events.generated.event_pb2 import EventContent
from app.models import (
    User,
    Job,
    PartnerUser,
)
from app.proton.utils import get_proton_partner
from events.event_sink import EventSink


class SendEventToWebhookJob:
    def __init__(self, user: User, event: EventContent):
        self._user: User = user
        self._event: EventContent = event

    def run(self, sink: EventSink) -> bool:
        # The Proton partner must be configured; without it there is no webhook target
        try:
            proton_partner_id = get_proton_partner().id
        except ProtonPartnerNotSetUp:
            return False

        # Retrieve the PartnerUser record that links this user to the partner
        partner_user = PartnerUser.get_by(
            user_id=self._user.id, partner_id=proton_partner_id
        )
        if partner_user is None:
            return True
        event = event_pb2.Event(
            user_id=self._user.id,
            external_user_id=partner_user.external_user_id,
            partner_id=partner_user.partner_id,
            content=self._event,
        )

        serialized = event.SerializeToString()
        return sink.send_data_to_webhook(serialized)

    @staticmethod
    def create_from_job(job: Job) -> Optional[SendEventToWebhookJob]:
        user = User.get(job.payload["user_id"])
        if not user:
            return None
        event_data = base64.b64decode(job.payload["event"])
        event = event_pb2.EventContent()
        event.ParseFromString(event_data)

        return SendEventToWebhookJob(user=user, event=event)

    def store_job_in_db(self, run_at: Optional[arrow.Arrow]) -> Job:
        stub = self._event.SerializeToString()
        return Job.create(
            name=config.JOB_SEND_EVENT_TO_WEBHOOK,
            payload={
                "user_id": self._user.id,
                "event": base64.b64encode(stub).decode("utf-8"),
            },
            run_at=run_at if run_at is not None else arrow.now(),
            commit=True,
        )
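A minimal end-to-end sketch of the job above: enqueue with store_job_in_db, then rebuild and execute it in the job runner. Here some_event_content and some_event_sink are placeholders; the sink only needs the send_data_to_webhook method used in run():

from app.jobs.send_event_job import SendEventToWebhookJob

# Enqueue: the event is serialized and base64-encoded into the Job payload.
job = SendEventToWebhookJob(user=user, event=some_event_content).store_job_in_db(run_at=None)

# Later, in the job runner: rebuild the job from the DB row and run it.
rebuilt = SendEventToWebhookJob.create_from_job(job)
if rebuilt is not None:
    ok = rebuilt.run(sink=some_event_sink)  # some_event_sink: any EventSink implementation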
@@ -1,6 +1,6 @@
import dataclasses
import secrets
import random
from enum import Enum
from typing import Optional
import arrow

@@ -12,10 +12,13 @@ from app.email_utils import (
    email_can_be_used_as_mailbox,
    send_email,
    render,
    get_email_domain_part,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import User, Mailbox, Job, MailboxActivation
from app.models import User, Mailbox, Job, MailboxActivation, Alias
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import canonicalize_email, sanitize_email


@dataclasses.dataclass
@@ -35,8 +38,9 @@ class OnlyPaidError(MailboxError):


class CannotVerifyError(MailboxError):
    def __init__(self, msg: str):
    def __init__(self, msg: str, deleted_activation_code: bool = False):
        self.msg = msg
        self.deleted_activation_code = deleted_activation_code


MAX_ACTIVATION_TRIES = 3
@@ -50,29 +54,22 @@ def create_mailbox(
    use_digit_codes: bool = False,
    send_link: bool = True,
) -> CreateMailboxOutput:
    email = sanitize_email(email)
    if not user.is_premium():
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but is not premium"
        )
        raise OnlyPaidError()
    if not is_valid_email(email):
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but is not valid email"
        )
        raise MailboxError("Invalid email")
    elif mailbox_already_used(email, user):
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but email is already used"
        )
        raise MailboxError("Email already used")
    elif not email_can_be_used_as_mailbox(email):
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but email is invalid"
        )
        raise MailboxError("Invalid email")
    new_mailbox = Mailbox.create(
    check_email_for_mailbox(email, user)
    new_mailbox: Mailbox = Mailbox.create(
        email=email, user_id=user.id, verified=verified, commit=True
    )
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.CreateMailbox,
        message=f"Create mailbox {new_mailbox.id} ({new_mailbox.email}). Verified={verified}",
        commit=True,
    )

    if verified:
        LOG.i(f"User {user} has created a pre-verified mailbox with {email}")
@@ -95,8 +92,29 @@ def create_mailbox(
    return output


def check_email_for_mailbox(email: str, user: User) -> None:
    if not is_valid_email(email):
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but is not valid email"
        )
        raise MailboxError("Invalid email")
    elif mailbox_already_used(email, user):
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but email is already used"
        )
        raise MailboxError("Email already used")
    elif not email_can_be_used_as_mailbox(email):
        LOG.i(
            f"User {user} has tried to create mailbox with {email} but email is invalid"
        )
        raise MailboxError("Invalid email")


def delete_mailbox(
    user: User, mailbox_id: int, transfer_mailbox_id: Optional[int]
    user: User,
    mailbox_id: int,
    transfer_mailbox_id: Optional[int],
    send_mail: bool = True,
) -> Mailbox:
    mailbox = Mailbox.get(mailbox_id)

@@ -129,7 +147,7 @@ def delete_mailbox(

        if not transfer_mailbox.verified:
            LOG.i(f"User {user} has tried to transfer to a non-verified mailbox")
            MailboxError("Your new mailbox is not verified")
            raise MailboxError("Your new mailbox is not verified")

    # Schedule the mailbox deletion job
    LOG.i(
@@ -142,6 +160,7 @@ def delete_mailbox(
            "transfer_mailbox_id": transfer_mailbox_id
            if transfer_mailbox_id and transfer_mailbox_id > 0
            else None,
            "send_mail": send_mail,
        },
        run_at=arrow.now(),
        commit=True,
@@ -163,17 +182,17 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
            f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
        )
        raise MailboxError("Invalid mailbox")
    if mailbox.verified:
        LOG.i(
            f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
        )
        clear_activation_codes_for_mailbox(mailbox)
        return mailbox
    if mailbox.user_id != user.id:
        LOG.i(
            f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
        )
        raise MailboxError("Invalid mailbox")
    if mailbox.verified and not mailbox.new_email:
        LOG.i(
            f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
        )
        clear_activation_codes_for_mailbox(mailbox)
        return mailbox

    activation = (
        MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
@@ -188,7 +207,10 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
    if activation.tries >= MAX_ACTIVATION_TRIES:
        LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
        clear_activation_codes_for_mailbox(mailbox)
        raise CannotVerifyError("Invalid activation code. Please request another code.")
        raise CannotVerifyError(
            "Invalid activation code. Please request another code.",
            deleted_activation_code=True,
        )
    if activation.created_at < arrow.now().shift(minutes=-15):
        LOG.i(
            f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
@@ -202,8 +224,34 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
        activation.tries = activation.tries + 1
        Session.commit()
        raise CannotVerifyError("Invalid activation code")
    LOG.i(f"User {user} has verified mailbox {mailbox_id}")
    mailbox.verified = True
    if mailbox.new_email:
        LOG.i(
            f"User {user} has verified mailbox email change from {mailbox.email} to {mailbox.new_email}"
        )
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.UpdateMailbox,
            message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
        )
        mailbox.email = mailbox.new_email
        mailbox.new_email = None
        mailbox.verified = True
    elif not mailbox.verified:
        LOG.i(f"User {user} has verified mailbox {mailbox_id}")
        mailbox.verified = True
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.VerifyMailbox,
            message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
        )
    if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
        raise MailboxError("That address is already in use")

    else:
        LOG.i(
            f"User {user} already has mailbox {mailbox} verified and no pending email change"
        )

    clear_activation_codes_for_mailbox(mailbox)
    return mailbox

@@ -213,7 +261,10 @@ def generate_activation_code(
) -> MailboxActivation:
    clear_activation_codes_for_mailbox(mailbox)
    if use_digit_code:
        code = "{:06d}".format(random.randint(1, 999999))
        if config.MAILBOX_VERIFICATION_OVERRIDE_CODE:
            code = config.MAILBOX_VERIFICATION_OVERRIDE_CODE
        else:
            code = "{:06d}".format(secrets.randbelow(1000000))[:6]
    else:
        code = secrets.token_urlsafe(16)
    return MailboxActivation.create(
@@ -225,7 +276,10 @@


def send_verification_email(
    user: User, mailbox: Mailbox, activation: MailboxActivation, send_link: bool = True
    user: User,
    mailbox: Mailbox,
    activation: MailboxActivation,
    send_link: bool = True,
):
    LOG.i(
        f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
@@ -258,3 +312,190 @@ def send_verification_email(
            mailbox_email=mailbox.email,
        ),
    )


def send_change_email(user: User, mailbox: Mailbox, activation: MailboxActivation):
    verification_url = f"{config.URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox.id}&code={activation.code}"

    send_email(
        mailbox.new_email,
        "Confirm mailbox change on SimpleLogin",
        render(
            "transactional/verify-mailbox-change.txt.jinja2",
            user=user,
            link=verification_url,
            mailbox_email=mailbox.email,
            mailbox_new_email=mailbox.new_email,
        ),
        render(
            "transactional/verify-mailbox-change.html",
            user=user,
            link=verification_url,
            mailbox_email=mailbox.email,
            mailbox_new_email=mailbox.new_email,
        ),
    )


def request_mailbox_email_change(
    user: User,
    mailbox: Mailbox,
    new_email: str,
    email_ownership_verified: bool = False,
    send_email: bool = True,
    use_digit_codes: bool = False,
) -> CreateMailboxOutput:
    new_email = sanitize_email(new_email)
    if new_email == mailbox.email:
        raise MailboxError("Same email")
    check_email_for_mailbox(new_email, user)
    if email_ownership_verified:
        mailbox.email = new_email
    else:
        mailbox.new_email = new_email
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.UpdateMailbox,
        message=f"Updated mailbox {mailbox.id} email ({new_email}) pre-verified({email_ownership_verified})",
    )
    Session.commit()

    if email_ownership_verified:
        LOG.i(f"User {user} has created a pre-verified mailbox with {new_email}")
        return CreateMailboxOutput(mailbox=mailbox, activation=None)

    LOG.i(f"User {user} has updated mailbox email with {new_email}")
    activation = generate_activation_code(mailbox, use_digit_code=use_digit_codes)
    output = CreateMailboxOutput(mailbox=mailbox, activation=activation)

    if not send_email:
        LOG.i(f"Skipping sending validation email for mailbox {mailbox}")
        return output

    send_change_email(
        user,
        mailbox,
        activation=activation,
    )
    return output


class MailboxEmailChangeError(Enum):
    InvalidId = 1
    EmailAlreadyUsed = 2


@dataclasses.dataclass
class MailboxEmailChangeResult:
    error: Optional[MailboxEmailChangeError]
    message: str
    message_category: str


def perform_mailbox_email_change(mailbox_id: int) -> MailboxEmailChangeResult:
    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)

    # new_email can be None if user cancels change in the meantime
    if mailbox and mailbox.new_email:
        user = mailbox.user
        if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
            return MailboxEmailChangeResult(
                error=MailboxEmailChangeError.EmailAlreadyUsed,
                message=f"{mailbox.new_email} is already used",
                message_category="error",
            )

        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.UpdateMailbox,
            message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
        )
        mailbox.email = mailbox.new_email
        mailbox.new_email = None

        # mark mailbox as verified if the change request is sent from an unverified mailbox
        mailbox.verified = True
        Session.commit()

        LOG.d("Mailbox change %s is verified", mailbox)
        return MailboxEmailChangeResult(
            error=None,
            message=f"The {mailbox.email} is updated",
            message_category="success",
        )
    else:
        return MailboxEmailChangeResult(
            error=MailboxEmailChangeError.InvalidId,
            message="Invalid link",
            message_category="error",
        )


def cancel_email_change(mailbox_id: int, user: User):
    mailbox = Mailbox.get(mailbox_id)
    if not mailbox:
        LOG.i(
            f"User {user} has tried to cancel an email change for an unknown mailbox {mailbox_id}"
        )
        raise MailboxError("Invalid mailbox")
    if mailbox.user.id != user.id:
        LOG.i(
            f"User {user} has tried to cancel an email change for mailbox {mailbox} owned by another user"
        )
        raise MailboxError("Invalid mailbox")
    mailbox.new_email = None
    LOG.i(f"User {mailbox.user} has cancelled mailbox email change")
    clear_activation_codes_for_mailbox(mailbox)


def __get_alias_mailbox_from_email(
    email_address: str, alias: Alias
) -> Optional[Mailbox]:
    for mailbox in alias.mailboxes:
        if mailbox.email == email_address:
            return mailbox

        for authorized_address in mailbox.authorized_addresses:
            if authorized_address.email == email_address:
                LOG.d(
                    "Found an authorized address for %s %s %s",
                    alias,
                    mailbox,
                    authorized_address,
                )
                return mailbox
    return None


def __get_alias_mailbox_from_email_or_canonical_email(
    email_address: str, alias: Alias
) -> Optional[Mailbox]:
    # Check the uncanonicalized address first: some users still have non-canonicalized
    # emails in the db. If it matches an existing one, use that; otherwise try the canonical form.
    mbox = __get_alias_mailbox_from_email(email_address, alias)
    if mbox is not None:
        return mbox
    canonical_email = canonicalize_email(email_address)
    if canonical_email != email_address:
        return __get_alias_mailbox_from_email(canonical_email, alias)
    return None


def get_mailbox_for_reply_phase(
    envelope_mail_from: str, header_mail_from: str, alias
) -> Optional[Mailbox]:
    """return the corresponding mailbox given the mail_from and alias
    Usually the mail_from=mailbox.email but it can also be one of the authorized addresses
    """
    mbox = __get_alias_mailbox_from_email_or_canonical_email(envelope_mail_from, alias)
    if mbox is not None:
        return mbox
    if not header_mail_from:
        return None
    envelope_from_domain = get_email_domain_part(envelope_mail_from)
    header_from_domain = get_email_domain_part(header_mail_from)
    if envelope_from_domain != header_from_domain:
        return None
    # For services that use VERP sending (envelope from has encoded data to account for bounces),
    # if the header-from domain matches the envelope-from domain we can use the header from
    return __get_alias_mailbox_from_email_or_canonical_email(header_mail_from, alias)
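A minimal usage sketch of the reply-phase resolution above, for a VERP-style sender: the envelope from carries bounce-tracking data, but shares a domain with the header From, so the header address resolves the mailbox (the addresses and the alias variable are illustrative):

# Envelope sender is VERP-encoded; header From is the real mailbox address.
mbox = get_mailbox_for_reply_phase(
    envelope_mail_from="bounces+user=123@mail.example.com",
    header_mail_from="user@mail.example.com",
    alias=alias,
)
# mbox is the alias mailbox (or authorized-address owner) matching user@mail.example.com, or None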
@@ -24,6 +24,7 @@ from sqlalchemy import text, desc, CheckConstraint, Index, Column
from sqlalchemy.dialects.postgresql import TSVECTOR
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import deferred
from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.sql import and_
from sqlalchemy_utils import ArrowType

@@ -157,6 +158,8 @@ class File(Base, ModelMixin):
    path = sa.Column(sa.String(128), unique=True, nullable=False)
    user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)

    __table_args__ = (sa.Index("ix_file_user_id", "user_id"),)

    def get_url(self, expires_in=3600):
        return s3.get_url(self.path, expires_in)

@@ -318,6 +321,8 @@ class HibpNotifiedAlias(Base, ModelMixin):

    notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)

    __table_args__ = (sa.Index("ix_hibp_notified_alias_user_id", "user_id"),)


class Fido(Base, ModelMixin):
    __tablename__ = "fido"
@@ -332,11 +337,13 @@ class Fido(Base, ModelMixin):
    name = sa.Column(sa.String(128), nullable=False, unique=False)
    user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)

    __table_args__ = (sa.Index("ix_fido_user_id", "user_id"),)


class User(Base, ModelMixin, UserMixin, PasswordOracle):
    __tablename__ = "users"

    FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
    FLAG_FREE_DISABLE_CREATE_CONTACTS = 1 << 0
    FLAG_CREATED_FROM_PARTNER = 1 << 1
    FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
    FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
@@ -543,7 +550,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
    # bitwise flags. Allow for future expansion
    flags = sa.Column(
        sa.BigInteger,
        default=FLAG_FREE_DISABLE_CREATE_ALIAS,
        default=FLAG_FREE_DISABLE_CREATE_CONTACTS,
        server_default="0",
        nullable=False,
    )
@@ -564,6 +571,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
            "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
        ),
        sa.Index("ix_users_delete_on", delete_on),
        sa.Index("ix_users_default_mailbox_id", default_mailbox_id),
        sa.Index(
            "ix_users_default_alias_custom_domain_id", default_alias_custom_domain_id
        ),
        sa.Index("ix_users_profile_picture_id", profile_picture_id),
    )

    @property
@@ -616,10 +628,19 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
        if "alternative_id" not in kwargs:
            user.alternative_id = str(uuid.uuid4())

        from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

        trail = ". Created from partner" if from_partner else ""
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.CreateUser,
            message=f"Created user {email}{trail}",
        )

        # If the user is created from partner, do not notify
        # nor give a trial
        if from_partner:
            user.flags = User.FLAG_CREATED_FROM_PARTNER
            user.flags = user.flags | User.FLAG_CREATED_FROM_PARTNER
            user.notification = False
            user.trial_end = None
            Job.create(
@@ -973,7 +994,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
    def has_custom_domain(self):
        return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0

    def custom_domains(self):
    def custom_domains(self) -> List["CustomDomain"]:
        return CustomDomain.filter_by(user_id=self.id, verified=True).all()

    def available_domains_for_random_alias(
@@ -1168,7 +1189,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
    def can_create_contacts(self) -> bool:
        if self.is_premium():
            return True
        if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
        if self.flags & User.FLAG_FREE_DISABLE_CREATE_CONTACTS == 0:
            return True
        return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS

@@ -1211,6 +1232,8 @@ class ActivationCode(Base, ModelMixin):

    expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)

    __table_args__ = (sa.Index("ix_activation_code_user_id", "user_id"),)

    def is_expired(self):
        return self.expired < arrow.now()

@@ -1227,6 +1250,8 @@ class ResetPasswordCode(Base, ModelMixin):

    expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)

    __table_args__ = (sa.Index("ix_reset_password_code_user_id", "user_id"),)

    def is_expired(self):
        return self.expired < arrow.now()

@@ -1269,6 +1294,8 @@ class MfaBrowser(Base, ModelMixin):

    user = orm.relationship(User)

    __table_args__ = (sa.Index("ix_mfa_browser_user_id", "user_id"),)

    @classmethod
    def create_new(cls, user, token_length=64) -> "MfaBrowser":
        found = False
@@ -1327,6 +1354,12 @@ class Client(Base, ModelMixin):
    user = orm.relationship(User)
    referral = orm.relationship("Referral")

    __table_args__ = (
        sa.Index("ix_client_user_id", "user_id"),
        sa.Index("ix_client_icon_id", "icon_id"),
        sa.Index("ix_client_referral_id", "referral_id"),
    )

    def nb_user(self):
        return ClientUser.filter_by(client_id=self.id).count()

@@ -1375,6 +1408,8 @@ class RedirectUri(Base, ModelMixin):

    client = orm.relationship(Client, backref="redirect_uris")

    __table_args__ = (sa.Index("ix_redirect_uri_client_id", "client_id"),)


class AuthorizationCode(Base, ModelMixin):
    __tablename__ = "authorization_code"
@@ -1396,6 +1431,11 @@ class AuthorizationCode(Base, ModelMixin):

    expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m)

    __table_args__ = (
        sa.Index("ix_authorization_code_client_id", "client_id"),
        sa.Index("ix_authorization_code_user_id", "user_id"),
    )

    def is_expired(self):
        return self.expired < arrow.now()

@@ -1418,6 +1458,11 @@ class OauthToken(Base, ModelMixin):

    expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)

    __table_args__ = (
        sa.Index("ix_oauth_token_user_id", "user_id"),
        sa.Index("ix_oauth_token_client_id", "client_id"),
    )

    def is_expired(self):
        return self.expired < arrow.now()

@@ -1571,6 +1616,7 @@ class Alias(Base, ModelMixin):
            postgresql_ops={"note": "gin_trgm_ops"},
            postgresql_using="gin",
        ),
        Index("ix_alias_original_owner_id", "original_owner_id"),
    )

    user = orm.relationship(User, foreign_keys=[user_id])
@@ -1613,7 +1659,7 @@ class Alias(Base, ModelMixin):
        return False

    @staticmethod
    def get_custom_domain(alias_address) -> Optional["CustomDomain"]:
    def get_custom_domain(alias_address: str) -> Optional["CustomDomain"]:
        alias_domain = validate_email(
            alias_address, check_deliverability=False, allow_smtputf8=False
        ).domain
@@ -1656,22 +1702,15 @@ class Alias(Base, ModelMixin):
        custom_domain = Alias.get_custom_domain(email)
        if custom_domain:
            new_alias.custom_domain_id = custom_domain.id
        else:
            custom_domain = CustomDomain.get(kw["custom_domain_id"])
            # If it comes from a custom domain created from a partner, mark the alias as partner-created
            if custom_domain is not None and custom_domain.partner_id is not None:
                new_alias.flags = (new_alias.flags or 0) | Alias.FLAG_PARTNER_CREATED

        Session.add(new_alias)
        DailyMetric.get_or_create_today_metric().nb_alias += 1

        # Internal import to avoid global import cycles
        from app.events.event_dispatcher import EventDispatcher
        from app.events.generated.event_pb2 import AliasCreated, EventContent

        event = AliasCreated(
            alias_id=new_alias.id,
            alias_email=new_alias.email,
            alias_note=new_alias.note,
            enabled=True,
        )
        EventDispatcher.send_event(user, EventContent(alias_created=event))

        if (
            new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
            and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
@@ -1684,6 +1723,23 @@ class Alias(Base, ModelMixin):
        if flush:
            Session.flush()

        # Internal import to avoid global import cycles
        from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
        from app.events.event_dispatcher import EventDispatcher
        from app.events.generated.event_pb2 import AliasCreated, EventContent

        event = AliasCreated(
            id=new_alias.id,
            email=new_alias.email,
            note=new_alias.note,
            enabled=True,
            created_at=int(new_alias.created_at.timestamp),
        )
        EventDispatcher.send_event(user, EventContent(alias_created=event))
        emit_alias_audit_log(
            new_alias, AliasAuditLogAction.CreateAlias, "New alias created"
        )

        return new_alias

    @classmethod
@@ -1862,6 +1918,8 @@ class Contact(Base, ModelMixin):

    MAX_NAME_LENGTH = 512

    FLAG_PARTNER_CREATED = 1 << 0

    __tablename__ = "contact"

    __table_args__ = (
@@ -1920,6 +1978,9 @@ class Contact(Base, ModelMixin):
    # whether contact is created automatically during the forward phase
    automatic_created = sa.Column(sa.Boolean, nullable=True, default=False)

    # contact flags
    flags = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")

    @property
    def email(self):
        return self.website_email
@@ -2049,7 +2110,12 @@ class Contact(Base, ModelMixin):

class EmailLog(Base, ModelMixin):
    __tablename__ = "email_log"
    __table_args__ = (Index("ix_email_log_created_at", "created_at"),)
    __table_args__ = (
        Index("ix_email_log_created_at", "created_at"),
        Index("ix_email_log_mailbox_id", "mailbox_id"),
        Index("ix_email_log_bounced_mailbox_id", "bounced_mailbox_id"),
        Index("ix_email_log_refused_email_id", "refused_email_id"),
    )

    user_id = sa.Column(
        sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
@@ -2325,6 +2391,7 @@ class AliasUsedOn(Base, ModelMixin):

    __table_args__ = (
        sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
        sa.Index("ix_alias_used_on_user_id", "user_id"),
    )

    alias_id = sa.Column(
@@ -2351,6 +2418,11 @@ class ApiKey(Base, ModelMixin):

    user = orm.relationship(User)

    __table_args__ = (
        sa.Index("ix_api_key_code", "code"),
        sa.Index("ix_api_key_user_id", "user_id"),
    )

    @classmethod
    def create(cls, user_id, name=None, **kwargs):
        code = random_string(60)
@@ -2418,6 +2490,18 @@ class CustomDomain(Base, ModelMixin):
        sa.Boolean, nullable=False, default=False, server_default="0"
    )

    partner_id = sa.Column(
        sa.Integer,
        sa.ForeignKey("partner.id"),
        nullable=True,
        default=None,
        server_default=None,
    )

    pending_deletion = sa.Column(
        sa.Boolean, nullable=False, default=False, server_default="0"
    )

    __table_args__ = (
        Index(
            "ix_unique_domain",  # Index name
@@ -2425,6 +2509,8 @@ class CustomDomain(Base, ModelMixin):
            unique=True,
            postgresql_where=Column("ownership_verified"),
        ),  # The condition
        Index("ix_custom_domain_user_id", "user_id"),
        Index("ix_custom_domain_pending_deletion", "pending_deletion"),
    )

    user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
@@ -2442,9 +2528,6 @@ class CustomDomain(Base, ModelMixin):
    def get_trash_url(self):
        return config.URL + f"/dashboard/domains/{self.id}/trash"

    def get_ownership_dns_txt_value(self):
        return f"sl-verification={self.ownership_txt_token}"

    @classmethod
    def create(cls, **kwargs):
        domain = kwargs.get("domain")
@@ -2498,6 +2581,7 @@ class AutoCreateRule(Base, ModelMixin):
        sa.UniqueConstraint(
            "custom_domain_id", "order", name="uq_auto_create_rule_order"
        ),
        sa.Index("ix_auto_create_rule_custom_domain_id", "custom_domain_id"),
    )

    custom_domain_id = sa.Column(
@@ -2541,6 +2625,7 @@ class DomainDeletedAlias(Base, ModelMixin):

    __table_args__ = (
        sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"),
        sa.Index("ix_domain_deleted_alias_user_id", "user_id"),
    )

    email = sa.Column(sa.String(256), nullable=False)
@@ -2601,6 +2686,8 @@ class Coupon(Base, ModelMixin):
    # a coupon can have an expiration
    expires_date = sa.Column(ArrowType, nullable=True)

    __table_args__ = (sa.Index("ix_coupon_used_by_user_id", "used_by_user_id"),)


class Directory(Base, ModelMixin):
    __tablename__ = "directory"
@@ -2615,6 +2702,8 @@ class Directory(Base, ModelMixin):
        "Mailbox", secondary="directory_mailbox", lazy="joined"
    )

    __table_args__ = (sa.Index("ix_directory_user_id", "user_id"),)

    @property
    def mailboxes(self):
        if self._mailboxes:
@@ -2716,7 +2805,10 @@ class Mailbox(Base, ModelMixin):

    generic_subject = sa.Column(sa.String(78), nullable=True)

    __table_args__ = (sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),)
    __table_args__ = (
        sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),
        sa.Index("ix_mailbox_pgp_finger_print", "pgp_finger_print"),
    )

    user = orm.relationship(User, foreign_keys=[user_id])

@@ -2749,9 +2841,9 @@ class Mailbox(Base, ModelMixin):

        from app.email_utils import get_email_local_part

        mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
        mx_domains = get_mx_domains(get_email_local_part(self.email))
        # Proton is the first domain
        if mx_domains and mx_domains[0][1] in (
        if mx_domains and mx_domains[0].domain in (
            "mail.protonmail.ch.",
            "mailsec.protonmail.ch.",
        ):
@@ -2853,6 +2945,8 @@ class RefusedEmail(Base, ModelMixin):
    # toggle this when email content (stored at full_report_path & path) is deleted
    deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")

    __table_args__ = (sa.Index("ix_refused_email_user_id", "user_id"),)

    def get_url(self, expires_in=3600):
        if self.path:
            return s3.get_url(self.path, expires_in)
@@ -2875,6 +2969,8 @@ class Referral(Base, ModelMixin):

    user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")

    __table_args__ = (sa.Index("ix_referral_user_id", "user_id"),)

    @property
    def nb_user(self) -> int:
        return User.filter_by(referral_id=self.id, activated=True).count()
@@ -2914,6 +3010,8 @@ class SentAlert(Base, ModelMixin):
    to_email = sa.Column(sa.String(256), nullable=False)
    alert_type = sa.Column(sa.String(256), nullable=False)

    __table_args__ = (sa.Index("ix_sent_alert_user_id", "user_id"),)


class AliasMailbox(Base, ModelMixin):
    __tablename__ = "alias_mailbox"
@@ -3159,6 +3257,11 @@ class BatchImport(Base, ModelMixin):
    file = orm.relationship(File)
    user = orm.relationship(User)

    __table_args__ = (
        sa.Index("ix_batch_import_file_id", "file_id"),
        sa.Index("ix_batch_import_user_id", "user_id"),
    )

    def nb_alias(self):
        return Alias.filter_by(batch_import_id=self.id).count()

@@ -3179,6 +3282,7 @@ class AuthorizedAddress(Base, ModelMixin):

    __table_args__ = (
        sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
        sa.Index("ix_authorized_address_user_id", "user_id"),
    )

    mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
@@ -3320,6 +3424,8 @@ class Payout(Base, ModelMixin):

    user = orm.relationship(User)

    __table_args__ = (sa.Index("ix_payout_user_id", "user_id"),)


class IgnoredEmail(Base, ModelMixin):
    """If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
@@ -3421,6 +3527,8 @@ class PhoneReservation(Base, ModelMixin):
    start = sa.Column(ArrowType, nullable=False)
    end = sa.Column(ArrowType, nullable=False)

    __table_args__ = (sa.Index("ix_phone_reservation_user_id", "user_id"),)


class PhoneMessage(Base, ModelMixin):
    __tablename__ = "phone_message"
@@ -3595,6 +3703,11 @@ class ProviderComplaint(Base, ModelMixin):
    user = orm.relationship(User, foreign_keys=[user_id])
    refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])

    __table_args__ = (
        sa.Index("ix_provider_complaint_user_id", "user_id"),
        sa.Index("ix_provider_complaint_refused_email_id", "refused_email_id"),
    )


class PartnerApiToken(Base, ModelMixin):
    __tablename__ = "partner_api_token"
@@ -3665,7 +3778,8 @@ class PartnerSubscription(Base, ModelMixin):
    )

    # when the partner subscription ends
    end_at = sa.Column(ArrowType, nullable=False, index=True)
    end_at = sa.Column(ArrowType, nullable=True, index=True)
    lifetime = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")

    partner_user = orm.relationship(PartnerUser)

@@ -3687,7 +3801,9 @@ class PartnerSubscription(Base, ModelMixin):
        return None

    def is_active(self):
        return self.end_at > arrow.now().shift(days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS)
        return self.lifetime or self.end_at > arrow.now().shift(
            days=-_PARTNER_SUBSCRIPTION_GRACE_DAYS
        )


# endregion
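With end_at now nullable and the lifetime flag added, is_active() short-circuits on lifetime subscriptions before touching the date comparison. A small in-memory sketch of the two cases (instances constructed directly for illustration, not persisted):

import arrow

# Lifetime subscription: end_at can stay None; the flag alone keeps it active.
assert PartnerSubscription(lifetime=True, end_at=None).is_active()

# Dated subscription: active while end_at is within the grace window.
assert PartnerSubscription(lifetime=False, end_at=arrow.now().shift(days=30)).is_active()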
@@ -3718,6 +3834,8 @@ class NewsletterUser(Base, ModelMixin):
    user = orm.relationship(User)
    newsletter = orm.relationship(Newsletter)

    __table_args__ = (sa.Index("ix_newsletter_user_user_id", "user_id"),)


class ApiToCookieToken(Base, ModelMixin):
    __tablename__ = "api_cookie_token"
@@ -3728,6 +3846,11 @@ class ApiToCookieToken(Base, ModelMixin):
    user = orm.relationship(User)
    api_key = orm.relationship(ApiKey)

    __table_args__ = (
        sa.Index("ix_api_to_cookie_token_api_key_id", "api_key_id"),
        sa.Index("ix_api_to_cookie_token_user_id", "user_id"),
    )

    @classmethod
    def create(cls, **kwargs):
        code = secrets.token_urlsafe(32)
@@ -3750,17 +3873,19 @@ class SyncEvent(Base, ModelMixin):
        sa.Index("ix_sync_event_taken_time", "taken_time"),
    )

    def mark_as_taken(self) -> bool:
        sql = """
            UPDATE sync_event
            SET taken_time = :taken_time
            WHERE id = :sync_event_id
            AND taken_time IS NULL
        """
        args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}

        res = Session.execute(sql, args)
        Session.commit()
    def mark_as_taken(self, allow_taken_older_than: Optional[Arrow] = None) -> bool:
        try:
            taken_condition = ["taken_time IS NULL"]
            args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
            if allow_taken_older_than:
                taken_condition.append("taken_time < :taken_older_than")
                args["taken_older_than"] = allow_taken_older_than.datetime
            sql_taken_condition = "({})".format(" OR ".join(taken_condition))
            sql = f"UPDATE sync_event SET taken_time = :taken_time WHERE id = :sync_event_id AND {sql_taken_condition}"
            res = Session.execute(sql, args)
            Session.commit()
        except ObjectDeletedError:
            return False

        return res.rowcount > 0

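The reworked mark_as_taken is a compare-and-set UPDATE: it only wins while the row is untaken, or, with allow_taken_older_than, when a previous consumer has held it past a cutoff. A minimal consumer-loop sketch built on it; the 10-minute cutoff and handle() are illustrative, not values from this diff:

import arrow

def process_pending_events(events):
    # Reclaim events taken over 10 minutes ago but never completed
    # (e.g. a consumer crashed mid-processing).
    cutoff = arrow.now().shift(minutes=-10)
    for event in events:
        if not event.mark_as_taken(allow_taken_older_than=cutoff):
            continue  # another consumer still owns this event
        handle(event)  # hypothetical per-event handler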
@@ -3784,3 +3909,39 @@ class SyncEvent(Base, ModelMixin):
            .limit(100)
            .all()
        )


class AliasAuditLog(Base, ModelMixin):
    """This model holds an audit log of all the actions performed on an alias"""

    __tablename__ = "alias_audit_log"

    user_id = sa.Column(sa.Integer, nullable=False)
    alias_id = sa.Column(sa.Integer, nullable=False)
    alias_email = sa.Column(sa.String(255), nullable=False)
    action = sa.Column(sa.String(255), nullable=False)
    message = sa.Column(sa.Text, default=None, nullable=True)

    __table_args__ = (
        sa.Index("ix_alias_audit_log_user_id", "user_id"),
        sa.Index("ix_alias_audit_log_alias_id", "alias_id"),
        sa.Index("ix_alias_audit_log_alias_email", "alias_email"),
        sa.Index("ix_alias_audit_log_created_at", "created_at"),
    )


class UserAuditLog(Base, ModelMixin):
    """This model holds an audit log of all the actions performed by a user"""

    __tablename__ = "user_audit_log"

    user_id = sa.Column(sa.Integer, nullable=False)
    user_email = sa.Column(sa.String(255), nullable=False)
    action = sa.Column(sa.String(255), nullable=False)
    message = sa.Column(sa.Text, default=None, nullable=True)

    __table_args__ = (
        sa.Index("ix_user_audit_log_user_id", "user_id"),
        sa.Index("ix_user_audit_log_user_email", "user_email"),
        sa.Index("ix_user_audit_log_created_at", "created_at"),
    )
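The emit_user_audit_log / emit_alias_audit_log helpers called throughout this diff are not part of this changeset; a minimal sketch of what the user-side helper could look like against the UserAuditLog model above. The enum member names mirror actions referenced elsewhere in the diff; the string values are assumptions:

from enum import Enum

class UserAuditLogAction(Enum):
    CreateUser = "create_user"
    CreateMailbox = "create_mailbox"
    Upgrade = "upgrade"
    # ... remaining actions referenced in this diff

def emit_user_audit_log(user, action: UserAuditLogAction, message: str, commit: bool = False):
    # One row per action; indexed by user_id, user_email and created_at (see model above).
    UserAuditLog.create(
        user_id=user.id,
        user_email=user.email,
        action=action.value,
        message=message,
        commit=commit,
    )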
55
app/app/partner_user_utils.py
Normal file
@@ -0,0 +1,55 @@
from typing import Optional

import arrow
from arrow import Arrow

from app import config
from app.models import PartnerUser, PartnerSubscription, User, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def create_partner_user(
    user: User, partner_id: int, partner_email: str, external_user_id: str
) -> PartnerUser:
    instance = PartnerUser.create(
        user_id=user.id,
        partner_id=partner_id,
        partner_email=partner_email,
        external_user_id=external_user_id,
    )
    Job.create(
        name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
        payload={"user_id": user.id},
        run_at=arrow.now(),
    )
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.LinkAccount,
        message=f"Linked account to partner_id={partner_id} | partner_email={partner_email} | external_user_id={external_user_id}",
    )

    return instance


def create_partner_subscription(
    partner_user: PartnerUser,
    expiration: Optional[Arrow] = None,
    lifetime: bool = False,
    msg: Optional[str] = None,
) -> PartnerSubscription:
    instance = PartnerSubscription.create(
        partner_user_id=partner_user.id,
        end_at=expiration,
        lifetime=lifetime,
    )

    message = "User upgraded through partner subscription"
    if msg:
        message += f" | {msg}"
    emit_user_audit_log(
        user=partner_user.user,
        action=UserAuditLogAction.Upgrade,
        message=message,
    )

    return instance
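A minimal usage sketch combining the two helpers during a partner account link; the user and proton_partner objects are assumed to exist already:

import arrow

from app.partner_user_utils import create_partner_user, create_partner_subscription

partner_user = create_partner_user(
    user=user,
    partner_id=proton_partner.id,
    partner_email="test@proton.me",
    external_user_id="DUMMY",
)
# Either a dated subscription...
create_partner_subscription(partner_user, expiration=arrow.now().shift(years=1))
# ...or a lifetime one (end_at stays NULL, matching the PartnerSubscription change above).
create_partner_subscription(partner_user, lifetime=True, msg="Lifetime deal")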
0
app/app/payments/__init__.py
Normal file
121
app/app/payments/coinbase.py
Normal file
@@ -0,0 +1,121 @@
from typing import Optional

import arrow

from coinbase_commerce.error import WebhookInvalidPayload, SignatureVerificationError
from coinbase_commerce.webhook import Webhook
from flask import Flask, request

from app.config import COINBASE_WEBHOOK_SECRET
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import CoinbaseSubscription, User
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def setup_coinbase_commerce(app: Flask):
    @app.route("/coinbase", methods=["POST"])
    def coinbase_webhook():
        # event payload
        request_data = request.data.decode("utf-8")
        # webhook signature
        request_sig = request.headers.get("X-CC-Webhook-Signature", None)

        try:
            # signature verification and event object construction
            event = Webhook.construct_event(
                request_data, request_sig, COINBASE_WEBHOOK_SECRET
            )
        except (WebhookInvalidPayload, SignatureVerificationError) as e:
            LOG.e("Invalid Coinbase webhook")
            return str(e), 400

        LOG.d("Coinbase event %s", event)

        if event["type"] == "charge:confirmed":
            if handle_coinbase_event(event):
                return "success", 200
            else:
                return "error", 400

        return "success", 200


def handle_coinbase_event(event) -> bool:
    server_user_id = event["data"]["metadata"]["user_id"]
    try:
        user_id = int(server_user_id)
    except ValueError:
        user_id = int(float(server_user_id))

    code = event["data"]["code"]
    user: Optional[User] = User.get(user_id)
    if not user:
        LOG.e("User not found %s", user_id)
        return False

    coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
        user_id=user_id
    )

    if not coinbase_subscription:
        LOG.d("Create a coinbase subscription for %s", user)
        coinbase_subscription = CoinbaseSubscription.create(
            user_id=user_id, end_at=arrow.now().shift(years=1), code=code, commit=True
        )
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.Upgrade,
            message="Upgraded through Coinbase",
            commit=True,
        )
        send_email(
            user.email,
            "Your SimpleLogin account has been upgraded",
            render(
                "transactional/coinbase/new-subscription.txt",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
            render(
                "transactional/coinbase/new-subscription.html",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
        )
    else:
        if coinbase_subscription.code != code:
            LOG.d("Update code from %s to %s", coinbase_subscription.code, code)
            coinbase_subscription.code = code

        if coinbase_subscription.is_active():
            coinbase_subscription.end_at = coinbase_subscription.end_at.shift(years=1)
        else:  # already expired subscription
            coinbase_subscription.end_at = arrow.now().shift(years=1)

        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.SubscriptionExtended,
            message="Extended coinbase subscription",
        )
        Session.commit()

        send_email(
            user.email,
            "Your SimpleLogin account has been extended",
            render(
                "transactional/coinbase/extend-subscription.txt",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
            render(
                "transactional/coinbase/extend-subscription.html",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
        )
    execute_subscription_webhook(user)

    return True
286
app/app/payments/paddle.py
Normal file
@@ -0,0 +1,286 @@
import arrow
import json
from dateutil.relativedelta import relativedelta


from flask import Flask, request

from app import paddle_utils, paddle_callback
from app.config import (
    PADDLE_MONTHLY_PRODUCT_ID,
    PADDLE_MONTHLY_PRODUCT_IDS,
    PADDLE_YEARLY_PRODUCT_IDS,
    PADDLE_COUPON_ID,
)
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import Subscription, PlanEnum, User, Coupon
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string


def setup_paddle_callback(app: Flask):
    @app.route("/paddle", methods=["GET", "POST"])
    def paddle():
        LOG.d(f"paddle callback {request.form.get('alert_name')} {request.form}")

        # make sure the request comes from Paddle
        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
            return "KO", 400

        if (
            request.form.get("alert_name") == "subscription_created"
        ):  # new user subscribes
            # the passthrough is json encoded, e.g.
            # request.form.get("passthrough") = '{"user_id": 88 }'
            passthrough = json.loads(request.form.get("passthrough"))
            user_id = passthrough.get("user_id")
            user = User.get(user_id)

            subscription_plan_id = int(request.form.get("subscription_plan_id"))

            if subscription_plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
                plan = PlanEnum.monthly
            elif subscription_plan_id in PADDLE_YEARLY_PRODUCT_IDS:
                plan = PlanEnum.yearly
            else:
                LOG.e(
                    "Unknown subscription_plan_id %s %s",
                    subscription_plan_id,
                    request.form,
                )
                return "No such subscription", 400

            sub = Subscription.get_by(user_id=user.id)

            if not sub:
                LOG.d(f"create a new Subscription for user {user}")
                Subscription.create(
                    user_id=user.id,
                    cancel_url=request.form.get("cancel_url"),
                    update_url=request.form.get("update_url"),
                    subscription_id=request.form.get("subscription_id"),
                    event_time=arrow.now(),
                    next_bill_date=arrow.get(
                        request.form.get("next_bill_date"), "YYYY-MM-DD"
                    ).date(),
                    plan=plan,
                )
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.Upgrade,
                    message="Upgraded through Paddle",
                )
            else:
                LOG.d(f"Update an existing Subscription for user {user}")
                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.subscription_id = request.form.get("subscription_id")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()
                sub.plan = plan

                # make sure to set the new plan as not-cancelled
                # in case user cancels a plan and subscribes a new plan
                sub.cancelled = False
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended Paddle subscription",
                )

            execute_subscription_webhook(user)
            LOG.d("User %s upgrades!", user)

            Session.commit()

        elif request.form.get("alert_name") == "subscription_payment_succeeded":
            subscription_id = request.form.get("subscription_id")
            LOG.d("Update subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            # when user subscribes, the "subscription_payment_succeeded" can arrive BEFORE "subscription_created"
            # at that time, subscription object does not exist yet
            if sub:
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()

                Session.commit()
                execute_subscription_webhook(sub.user)

        elif request.form.get("alert_name") == "subscription_cancelled":
            subscription_id = request.form.get("subscription_id")

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            if sub:
                # cancellation_effective_date should be the same as next_bill_date
                LOG.w(
                    "Cancel subscription %s %s on %s, next bill date %s",
                    subscription_id,
                    sub.user,
                    request.form.get("cancellation_effective_date"),
                    sub.next_bill_date,
                )
                sub.event_time = arrow.now()

                sub.cancelled = True
                emit_user_audit_log(
                    user=sub.user,
                    action=UserAuditLogAction.SubscriptionCancelled,
                    message="Cancelled Paddle subscription",
                )
                Session.commit()

                user = sub.user

                send_email(
                    user.email,
                    "SimpleLogin - your subscription is canceled",
                    render(
                        "transactional/subscription-cancel.txt",
                        user=user,
                        end_date=request.form.get("cancellation_effective_date"),
                    ),
                )
                execute_subscription_webhook(sub.user)

            else:
                # user might have deleted their account
                LOG.i(f"Cancel non-existent subscription {subscription_id}")
                return "OK"
        elif request.form.get("alert_name") == "subscription_updated":
            subscription_id = request.form.get("subscription_id")

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            if sub:
                next_bill_date = request.form.get("next_bill_date")
                if not next_bill_date:
                    paddle_callback.failed_payment(sub, subscription_id)
                    return "OK"

                LOG.d(
                    "Update subscription %s %s on %s, next bill date %s",
                    subscription_id,
                    sub.user,
                    request.form.get("cancellation_effective_date"),
                    sub.next_bill_date,
                )
                if (
                    int(request.form.get("subscription_plan_id"))
                    == PADDLE_MONTHLY_PRODUCT_ID
                ):
                    plan = PlanEnum.monthly
                else:
                    plan = PlanEnum.yearly

                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()
                sub.plan = plan

                # make sure to set the new plan as not-cancelled
                sub.cancelled = False
                emit_user_audit_log(
                    user=sub.user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended Paddle subscription",
                )

                Session.commit()
                execute_subscription_webhook(sub.user)
            else:
                LOG.w(
                    f"update non-existent subscription {subscription_id}. {request.form}"
                )
                return "No such subscription", 400
        elif request.form.get("alert_name") == "payment_refunded":
            subscription_id = request.form.get("subscription_id")
            LOG.d("Refund request for subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)

            if sub:
                user = sub.user
                Subscription.delete(sub.id)
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.SubscriptionCancelled,
                    message="Paddle subscription cancelled as user requested a refund",
                )
                Session.commit()
                LOG.e("%s requests a refund", user)
                execute_subscription_webhook(sub.user)

        elif request.form.get("alert_name") == "subscription_payment_refunded":
            subscription_id = request.form.get("subscription_id")
            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            LOG.d(
                "Handle subscription_payment_refunded for subscription %s",
                subscription_id,
            )

            if not sub:
                LOG.w(
                    "No such subscription for %s, payload %s",
                    subscription_id,
                    request.form,
                )
                return "No such subscription"

            plan_id = int(request.form["subscription_plan_id"])
            if request.form["refund_type"] == "full":
                if plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
                    LOG.d("subtract 1 month from next_bill_date %s", sub.next_bill_date)
                    sub.next_bill_date = sub.next_bill_date - relativedelta(months=1)
                    LOG.d("next_bill_date is %s", sub.next_bill_date)
                    Session.commit()
                elif plan_id in PADDLE_YEARLY_PRODUCT_IDS:
                    LOG.d("subtract 1 year from next_bill_date %s", sub.next_bill_date)
                    sub.next_bill_date = sub.next_bill_date - relativedelta(years=1)
                    LOG.d("next_bill_date is %s", sub.next_bill_date)
                    Session.commit()
                else:
                    LOG.e("Unknown plan_id %s", plan_id)
            else:
                LOG.w("partial subscription_payment_refunded, not handled")
            execute_subscription_webhook(sub.user)

        return "OK"

    @app.route("/paddle_coupon", methods=["GET", "POST"])
    def paddle_coupon():
        LOG.d("paddle coupon callback %s", request.form)

        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
            return "KO", 400

        product_id = request.form.get("p_product_id")
        if product_id != PADDLE_COUPON_ID:
            LOG.e("product_id %s does not match %s", product_id, PADDLE_COUPON_ID)
            return "KO", 400

        email = request.form.get("email")
        LOG.d("Paddle coupon request for %s", email)

        coupon = Coupon.create(
            code=random_string(30),
            comment="For 1-year coupon",
            expires_date=arrow.now().shift(years=1, days=-1),
            commit=True,
        )

        return (
            f"Your 1-year coupon is <b>{coupon.code}</b> <br> "
            f"It's valid until <b>{coupon.expires_date.date().isoformat()}</b>"
        )
@@ -16,6 +16,7 @@ PROTON_ERROR_CODE_HV_NEEDED = 9001

PLAN_FREE = 1
PLAN_PREMIUM = 2
PLAN_PREMIUM_LIFETIME = 3


@dataclass
@@ -112,10 +113,13 @@ class HttpProtonClient(ProtonClient):
        if plan_value == PLAN_FREE:
            plan = SLPlan(type=SLPlanType.Free, expiration=None)
        elif plan_value == PLAN_PREMIUM:
            expiration = info.get("PlanExpiration", "1")
            plan = SLPlan(
                type=SLPlanType.Premium,
                expiration=Arrow.fromtimestamp(info["PlanExpiration"], tzinfo="utc"),
                expiration=Arrow.fromtimestamp(expiration, tzinfo="utc"),
            )
        elif plan_value == PLAN_PREMIUM_LIFETIME:
            plan = SLPlan(SLPlanType.PremiumLifetime, expiration=None)
        else:
            raise Exception(f"Invalid value for plan: {plan_value}")

@ -1,10 +1,12 @@
from newrelic import agent
from typing import Optional

from newrelic import agent

from app.db import Session
from app.log import LOG
from app.errors import ProtonPartnerNotSetUp
from app.log import LOG
from app.models import Partner, PartnerUser, User
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

PROTON_PARTNER_NAME = "Proton"
_PROTON_PARTNER: Optional[Partner] = None
@ -25,13 +27,25 @@ def is_proton_partner(partner: Partner) -> bool:
    return partner.name == PROTON_PARTNER_NAME


def perform_proton_account_unlink(current_user: User):
def can_unlink_proton_account(user: User) -> bool:
    return (user.flags & User.FLAG_CREATED_FROM_PARTNER) == 0


def perform_proton_account_unlink(current_user: User) -> bool:
    if not can_unlink_proton_account(current_user):
        return False
    proton_partner = get_proton_partner()
    partner_user = PartnerUser.get_by(
        user_id=current_user.id, partner_id=proton_partner.id
    )
    if partner_user is not None:
        LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
        emit_user_audit_log(
            user=current_user,
            action=UserAuditLogAction.UnlinkAccount,
            message=f"User has unlinked the account (email={partner_user.partner_email} | external_user_id={partner_user.external_user_id})",
        )
        PartnerUser.delete(partner_user.id)
    Session.commit()
    agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
    return True
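The unlink helper now refuses to detach accounts that were created from the partner flow and reports the outcome to the caller. A minimal usage sketch (the handler and its return values are illustrative, not part of this diff):

from app.proton.utils import perform_proton_account_unlink

def handle_unlink_request(current_user):
    # Returns False without touching anything when the account was created from Proton
    if not perform_proton_account_unlink(current_user):
        return "Accounts created from Proton cannot be unlinked", 400
    return "Unlinked", 200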
21 app/app/sentry_utils.py Normal file
@ -0,0 +1,21 @@
from typing import Optional

from sentry_sdk.types import Event, Hint

_HTTP_CODES_TO_IGNORE = [416]


def _should_send(_event: Event, hint: Hint) -> bool:
    # Check if this is an HTTP Exception event
    if "exc_info" in hint:
        exc_type, exc_value, exc_traceback = hint["exc_info"]
        # Check if it's a Werkzeug HTTPException (raised for HTTP status codes)
        if hasattr(exc_value, "code") and exc_value.code in _HTTP_CODES_TO_IGNORE:
            return False
    return True


def sentry_before_send(event: Event, hint: Hint) -> Optional[Event]:
    if _should_send(event, hint):
        return event
    return None
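For context, a before_send hook like this is handed to sentry_sdk.init; returning None drops the event. A minimal wiring sketch, assuming a placeholder DSN (the actual initialization code is not part of this diff):

import sentry_sdk

from app.sentry_utils import sentry_before_send

sentry_sdk.init(
    dsn="https://publickey@example.ingest.sentry.io/0",  # placeholder DSN
    before_send=sentry_before_send,  # drop events whose HTTP code is in _HTTP_CODES_TO_IGNORE
)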
@ -1,38 +1,16 @@
import requests
from requests import RequestException

from app import config
from app.db import Session
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User


def execute_subscription_webhook(user: User):
    webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
    if webhook_url is None:
        return
    subscription_end = user.get_active_subscription_end(
        include_partner_subscription=False
    )
    sl_subscription_end = None
    if subscription_end:
        sl_subscription_end = subscription_end.timestamp
    payload = {
        "user_id": user.id,
        "is_premium": user.is_premium(),
        "active_subscription_end": sl_subscription_end,
    }
    try:
        response = requests.post(webhook_url, json=payload, timeout=2)
        if response.status_code == 200:
            LOG.i("Sent request to subscription update webhook successfully")
        else:
            LOG.i(
                f"Request to webhook failed with status {response.status_code}: {response.text}"
            )
    except RequestException as e:
        LOG.error(f"Subscription request exception: {e}")

    event = UserPlanChanged(plan_end_time=sl_subscription_end)
    EventDispatcher.send_event(user, EventContent(user_plan_change=event))
    Session.commit()
44 app/app/user_audit_log_utils.py Normal file
@ -0,0 +1,44 @@
from enum import Enum

from app.models import User, UserAuditLog


class UserAuditLogAction(Enum):
    CreateUser = "create_user"
    ActivateUser = "activate_user"
    ResetPassword = "reset_password"

    Upgrade = "upgrade"
    SubscriptionExtended = "subscription_extended"
    SubscriptionCancelled = "subscription_cancelled"
    LinkAccount = "link_account"
    UnlinkAccount = "unlink_account"

    CreateMailbox = "create_mailbox"
    VerifyMailbox = "verify_mailbox"
    UpdateMailbox = "update_mailbox"
    DeleteMailbox = "delete_mailbox"

    CreateCustomDomain = "create_custom_domain"
    VerifyCustomDomain = "verify_custom_domain"
    UpdateCustomDomain = "update_custom_domain"
    DeleteCustomDomain = "delete_custom_domain"

    CreateDirectory = "create_directory"
    UpdateDirectory = "update_directory"
    DeleteDirectory = "delete_directory"

    UserMarkedForDeletion = "user_marked_for_deletion"
    DeleteUser = "delete_user"


def emit_user_audit_log(
    user: User, action: UserAuditLogAction, message: str, commit: bool = False
):
    UserAuditLog.create(
        user_id=user.id,
        user_email=user.email,
        action=action.value,
        message=message,
        commit=commit,
    )
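A short usage sketch of the helper above (the user id and message are illustrative):

from app.models import User
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

user = User.get(42)  # hypothetical user id
emit_user_audit_log(
    user=user,
    action=UserAuditLogAction.CreateMailbox,
    message="Created mailbox 7 (user@example.com)",
    commit=True,  # persist immediately instead of relying on the caller's commit
)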
@ -3,6 +3,7 @@ from typing import Optional
from app.db import Session
from app.log import LOG
from app.models import User, SLDomain, CustomDomain, Mailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class CannotSetAlias(Exception):
@ -16,12 +17,13 @@ class CannotSetMailbox(Exception):


def set_default_alias_domain(user: User, domain_name: Optional[str]):
    if domain_name is None:
    if not domain_name:
        LOG.i(f"User {user} has set no domain as default domain")
        user.default_alias_public_domain_id = None
        user.default_alias_custom_domain_id = None
        Session.flush()
        return

    sl_domain: SLDomain = SLDomain.get_by(domain=domain_name)
    if sl_domain:
        if sl_domain.hidden:
@ -53,7 +55,7 @@ def set_default_alias_domain(user: User, domain_name: Optional[str]):


def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
    mailbox = Mailbox.get(mailbox_id)
    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)

    if not mailbox or mailbox.user_id != user.id:
        raise CannotSetMailbox("Invalid mailbox")
@ -66,5 +68,11 @@ def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
    LOG.i(f"User {user} has set mailbox {mailbox} as his default one")

    user.default_mailbox_id = mailbox.id
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.UpdateMailbox,
        message=f"Set mailbox {mailbox.id} ({mailbox.email}) as default",
    )

    Session.commit()
    return mailbox
@ -1,4 +1,3 @@
import random
import re
import secrets
import string
@ -32,8 +31,9 @@ def random_words(words: int = 2, numbers: int = 0):
    fields = [secrets.choice(_words) for i in range(words)]

    if numbers > 0:
        digits = "".join([str(random.randint(0, 9)) for i in range(numbers)])
        return "_".join(fields) + digits
        digits = [n for n in range(10)]
        suffix = "".join([str(secrets.choice(digits)) for i in range(numbers)])
        return "_".join(fields) + suffix
    else:
        return "_".join(fields)
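The change above swaps random.randint for secrets.choice, so the numeric suffix now comes from the same CSPRNG as the word selection. A quick sketch of the resulting behaviour (the module path and sample output are assumptions):

from app.utils import random_words  # assumed location of the helper

# e.g. "eagle_abacus307": two words joined by "_" plus a 3-digit suffix
print(random_words(words=2, numbers=3))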
90 app/cron.py
@ -14,6 +14,7 @@ from sqlalchemy.sql import Insert, text
from app import s3, config
from app.alias_utils import nb_email_log_for_mailbox
from app.api.views.apple import verify_receipt
from app.custom_domain_validation import CustomDomainValidation
from app.db import Session
from app.dns_utils import get_mx_domains, is_mx_equivalent
from app.email_utils import (
@ -59,8 +60,11 @@ from app.models import (
)
from app.pgp_utils import load_public_key_and_check, PGPException
from app.proton.utils import get_proton_partner
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email
from server import create_light_app
from tasks.clean_alias_audit_log import cleanup_alias_audit_log
from tasks.clean_user_audit_log import cleanup_user_audit_log
from tasks.cleanup_old_imports import cleanup_old_imports
from tasks.cleanup_old_jobs import cleanup_old_jobs
from tasks.cleanup_old_notifications import cleanup_old_notifications
@ -282,8 +286,16 @@ def notify_manual_sub_end():

def poll_apple_subscription():
    """Poll Apple API to update AppleSubscription"""
    # todo: only near the end of the subscription
    for apple_sub in AppleSubscription.all():
    for apple_sub in (
        AppleSubscription.filter(
            AppleSubscription.expires_date < arrow.now().shift(days=15)
        )
        .enable_eagerloads(False)
        .yield_per(100)
    ):
        if not apple_sub.is_valid():
            # Subscription is not valid anymore and hasn't been renewed
            continue
        if not apple_sub.product_id:
            LOG.d("Ignore %s", apple_sub)
            continue
@ -896,6 +908,24 @@ def check_mailbox_valid_pgp_keys():


def check_custom_domain():
    # Delete custom domains that haven't been verified in a month
    for custom_domain in (
        CustomDomain.filter(
            CustomDomain.verified == False,  # noqa: E712
            CustomDomain.created_at < arrow.now().shift(months=-1),
        )
        .enable_eagerloads(False)
        .yield_per(100)
    ):
        alias_count = Alias.filter(Alias.custom_domain_id == custom_domain.id).count()
        if alias_count > 0:
            LOG.warn(
                f"Custom Domain {custom_domain} has {alias_count} aliases. Won't delete"
            )
        else:
            LOG.i(f"Deleting unverified old custom domain {custom_domain}")
            CustomDomain.delete(custom_domain.id)

    LOG.d("Check verified domain for DNS issues")

    for custom_domain in CustomDomain.filter_by(verified=True):  # type: CustomDomain
@ -905,9 +935,11 @@ def check_custom_domain():
        LOG.i("custom domain has been deleted")


def check_single_custom_domain(custom_domain):
def check_single_custom_domain(custom_domain: CustomDomain):
    mx_domains = get_mx_domains(custom_domain.domain)
    if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
    validator = CustomDomainValidation(dkim_domain=config.EMAIL_DOMAIN)
    expected_custom_domains = validator.get_expected_mx_records(custom_domain)
    if not is_mx_equivalent(mx_domains, expected_custom_domains):
        user = custom_domain.user
        LOG.w(
            "The MX record is not correctly set for %s %s %s",
@ -965,7 +997,7 @@ def delete_expired_tokens():
    LOG.d("Delete api to cookie tokens older than %s, nb row %s", max_time, nb_row)


async def _hibp_check(api_key, queue):
async def _hibp_check(api_key: str, queue: asyncio.Queue):
    """
    Uses a single API key to check the queue as fast as possible.

@ -984,11 +1016,16 @@ async def _hibp_check(api_key):
        if not alias:
            continue
        user = alias.user
        if user.disabled or not user.is_paid():
        if user.disabled or not user.is_premium():
            # Mark it as hibp done to skip it as if it had been checked
            alias.hibp_last_check = arrow.utcnow()
            Session.commit()
            continue
        if alias.flags & Alias.FLAG_PARTNER_CREATED > 0:
            # Mark as hibp done
            alias.hibp_last_check = arrow.utcnow()
            Session.commit()
            continue

        LOG.d("Checking HIBP for %s", alias)

@ -1215,7 +1252,7 @@ def notify_hibp():


def clear_users_scheduled_to_be_deleted(dry_run=False):
    users = User.filter(
    users: List[User] = User.filter(
        and_(
            User.delete_on.isnot(None),
            User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
@ -1227,6 +1264,11 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
        )
        if dry_run:
            continue
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.DeleteUser,
            message=f"Delete user {user.id} ({user.email})",
        )
        User.delete(user.id)
        Session.commit()

@ -1238,6 +1280,16 @@ def delete_old_data():
    cleanup_old_notifications(oldest_valid)


def clear_alias_audit_log():
    oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
    cleanup_alias_audit_log(oldest_valid)


def clear_user_audit_log():
    oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
    cleanup_user_audit_log(oldest_valid)


if __name__ == "__main__":
    LOG.d("Start running cronjob")
    parser = argparse.ArgumentParser()
@ -1246,22 +1298,6 @@ if __name__ == "__main__":
        "--job",
        help="Choose a cron job to run",
        type=str,
        choices=[
            "stats",
            "notify_trial_end",
            "notify_manual_subscription_end",
            "notify_premium_end",
            "delete_logs",
            "delete_old_data",
            "poll_apple_subscription",
            "sanity_check",
            "delete_old_monitoring",
            "check_custom_domain",
            "check_hibp",
            "notify_hibp",
            "cleanup_tokens",
            "send_undelivered_mails",
        ],
    )
    args = parser.parse_args()
    # wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc
@ -1310,4 +1346,10 @@ if __name__ == "__main__":
        load_unsent_mails_from_fs_and_resend()
    elif args.job == "delete_scheduled_users":
        LOG.d("Deleting users scheduled to be deleted")
        clear_users_scheduled_to_be_deleted(dry_run=True)
        clear_users_scheduled_to_be_deleted()
    elif args.job == "clear_alias_audit_log":
        LOG.d("Clearing alias audit log")
        clear_alias_audit_log()
    elif args.job == "clear_user_audit_log":
        LOG.d("Clearing user audit log")
        clear_user_audit_log()
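Both poll_apple_subscription and check_custom_domain now share the same streaming idiom: narrow the candidate set in SQL, disable eager loading, and iterate in batches instead of materializing every row. A generic sketch of the pattern (the model and filter are only an example of the idiom, not new code from the diff):

import arrow

from app.models import AppleSubscription

nearly_expired = (
    AppleSubscription.filter(
        AppleSubscription.expires_date < arrow.now().shift(days=15)
    )
    .enable_eagerloads(False)  # skip relationship loading during the scan
    .yield_per(100)  # stream rows in batches of 100 instead of all at once
)
for apple_sub in nearly_expired:
    print(apple_sub.id)  # process one row at a time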
@ -14,15 +14,28 @@ jobs:
  - name: SimpleLogin Custom Domain check
    command: python /code/cron.py -j check_custom_domain
    shell: /bin/bash
    schedule: "15 2 * * *"
    schedule: "15 */4 * * *"
    captureStderr: true
    concurrencyPolicy: Forbid
    onFailure:
      retry:
        maximumRetries: 10
        initialDelay: 1
        maximumDelay: 30
        backoffMultiplier: 2

  - name: SimpleLogin HIBP check
    command: python /code/cron.py -j check_hibp
    shell: /bin/bash
    schedule: "15 3 * * *"
    schedule: "13 */4 * * *"
    captureStderr: true
    concurrencyPolicy: Forbid
    onFailure:
      retry:
        maximumRetries: 10
        initialDelay: 1
        maximumDelay: 30
        backoffMultiplier: 2

  - name: SimpleLogin Notify HIBP breaches
    command: python /code/cron.py -j notify_hibp
@ -31,6 +44,7 @@ jobs:
    captureStderr: true
    concurrencyPolicy: Forbid

  - name: SimpleLogin Delete Logs
    command: python /code/cron.py -j delete_logs
    shell: /bin/bash
@ -80,3 +94,17 @@ jobs:
    schedule: "*/5 * * * *"
    captureStderr: true
    concurrencyPolicy: Forbid

  - name: SimpleLogin clear alias_audit_log old entries
    command: python /code/cron.py -j clear_alias_audit_log
    shell: /bin/bash
    schedule: "0 * * * *" # Once every hour
    captureStderr: true
    concurrencyPolicy: Forbid

  - name: SimpleLogin clear user_audit_log old entries
    command: python /code/cron.py -j clear_user_audit_log
    shell: /bin/bash
    schedule: "0 * * * *" # Once every hour
    captureStderr: true
    concurrencyPolicy: Forbid
@ -47,13 +47,12 @@ from typing import List, Tuple, Optional
import newrelic.agent
from aiosmtpd.controller import Controller
from aiosmtpd.smtp import Envelope
from email_validator import validate_email, EmailNotValidError
from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from sqlalchemy.exc import IntegrityError

from app import pgp_utils, s3, config
from app.alias_utils import try_auto_create, change_alias_status
from app import pgp_utils, s3, config, contact_utils
from app.alias_utils import (
    try_auto_create,
    change_alias_status,
    get_alias_recipient_name,
)
from app.config import (
    EMAIL_DOMAIN,
    URL,
@ -145,6 +144,7 @@ from app.handler.unsubscribe_generator import UnsubscribeGenerator
from app.handler.unsubscribe_handler import UnsubscribeHandler
from app.log import LOG, set_message_id
from app.mail_sender import sl_sendmail
from app.mailbox_utils import get_mailbox_for_reply_phase
from app.message_utils import message_to_bytes
from app.models import (
    Alias,
@ -168,12 +168,18 @@ from app.pgp_utils import (
    sign_data,
    load_public_key_and_check,
)
from app.utils import sanitize_email, canonicalize_email
from app.utils import sanitize_email
from email_validator import validate_email, EmailNotValidError
from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from init_app import load_pgp_public_keys
from server import create_light_app
from sqlalchemy.exc import IntegrityError


def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Contact:
def get_or_create_contact(
    from_header: str, mail_from: str, alias: Alias
) -> Optional[Contact]:
    """
    contact_from_header is the RFC 2047 format FROM header
    """
@ -195,81 +201,18 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
            mail_from,
        )
        contact_email = mail_from

    if not is_valid_email(contact_email):
        LOG.w(
            "invalid contact email %s. Parse from %s %s",
            contact_email,
            from_header,
            mail_from,
        )
        # either reuse a contact with empty email or create a new contact with empty email
        contact_email = ""

    contact_email = sanitize_email(contact_email, not_lower=True)

    if contact_name and "\x00" in contact_name:
        LOG.w("issue with contact name %s", contact_name)
        contact_name = ""

    contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
    if contact:
        if contact.name != contact_name:
            LOG.d(
                "Update contact %s name %s to %s",
                contact,
                contact.name,
                contact_name,
            )
            contact.name = contact_name
            Session.commit()

        # contact created in the past does not have mail_from and from_header field
        if not contact.mail_from and mail_from:
            LOG.d(
                "Set contact mail_from %s: %s to %s",
                contact,
                contact.mail_from,
                mail_from,
            )
            contact.mail_from = mail_from
            Session.commit()
    else:
        alias_id = alias.id
        try:
            contact_email_for_reply = (
                contact_email if is_valid_email(contact_email) else ""
            )
            contact = Contact.create(
                user_id=alias.user_id,
                alias_id=alias_id,
                website_email=contact_email,
                name=contact_name,
                mail_from=mail_from,
                reply_email=generate_reply_email(contact_email_for_reply, alias),
                automatic_created=True,
            )
            if not contact_email:
                LOG.d("Create a contact with invalid email for %s", alias)
                contact.invalid_email = True

            LOG.d(
                "create contact %s for %s, reverse alias:%s",
                contact_email,
                alias,
                contact.reply_email,
            )

            Session.commit()
        except IntegrityError:
            # If the tx has been rolled back, the connection is borked. Force close to try to get a new one and start fresh
            Session.close()
            LOG.info(
                f"Contact with email {contact_email} for alias_id {alias_id} already existed, fetching from DB"
            )
            contact = Contact.get_by(alias_id=alias_id, website_email=contact_email)

    return contact
    contact_result = contact_utils.create_contact(
        email=contact_email,
        alias=alias,
        name=contact_name,
        mail_from=mail_from,
        allow_empty_email=True,
        automatic_created=True,
        from_partner=False,
    )
    if contact_result.error:
        LOG.w(f"Error creating contact: {contact_result.error.value}")
    return contact_result.contact


def get_or_create_reply_to_contact(
@ -294,33 +237,7 @@ def get_or_create_reply_to_contact(
        )
        return None

    contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
    if contact:
        return contact
    else:
        LOG.d(
            "create contact %s for alias %s via reply-to header %s",
            contact_address,
            alias,
            reply_to_header,
        )

        try:
            contact = Contact.create(
                user_id=alias.user_id,
                alias_id=alias.id,
                website_email=contact_address,
                name=contact_name,
                reply_email=generate_reply_email(contact_address, alias),
                automatic_created=True,
            )
            Session.commit()
        except IntegrityError:
            LOG.w("Contact %s %s already exist", alias, contact_address)
            Session.rollback()
            contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)

    return contact
    return contact_utils.create_contact(contact_address, alias, contact_name).contact


def replace_header_when_forward(msg: Message, alias: Alias, header: str):
@ -645,7 +562,7 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str

    if not user.is_active():
        LOG.w(f"User {user} has been soft deleted")
        return False, status.E502
        return [(False, status.E502)]

    if not user.can_send_or_receive():
        LOG.i(f"User {user} cannot receive emails")
@ -666,19 +583,44 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
    from_header = get_header_unicode(msg[headers.FROM])
    LOG.d("Create or get contact for from_header:%s", from_header)
    contact = get_or_create_contact(from_header, envelope.mail_from, alias)
    if not contact:
        return [(False, status.E504)]
    alias = (
        contact.alias
    )  # In case the Session was closed in the get_or_create we re-fetch the alias

    reply_to_contact = None
    reply_to_contact = []
    if msg[headers.REPLY_TO]:
        reply_to = get_header_unicode(msg[headers.REPLY_TO])
        LOG.d("Create or get contact for reply_to_header:%s", reply_to)
        # ignore when reply-to = alias
        if reply_to == alias.email:
            LOG.i("Reply-to same as alias %s", alias)
        else:
            reply_to_contact = get_or_create_reply_to_contact(reply_to, alias, msg)
        reply_to_header_contents = get_header_unicode(msg[headers.REPLY_TO])
        if reply_to_header_contents:
            LOG.d(
                "Create or get contact for reply_to_header:%s", reply_to_header_contents
            )
            for reply_to in [
                reply_to.strip()
                for reply_to in reply_to_header_contents.split(",")
                if reply_to.strip()
            ]:
                reply_to_name, reply_to_email = parse_full_address(reply_to)
                if reply_to_email == alias.email:
                    LOG.i("Reply-to same as alias %s", alias)
                else:
                    reply_contact = get_or_create_reply_to_contact(
                        reply_to_email, alias, msg
                    )
                    if reply_contact:
                        reply_to_contact.append(reply_contact)

    if alias.user.delete_on is not None:
        LOG.d(f"user {user} is pending to be deleted. Do not forward")
        EmailLog.create(
            contact_id=contact.id,
            user_id=contact.user_id,
            blocked=True,
            alias_id=contact.alias_id,
            commit=True,
        )
        return [(True, status.E502)]

    if not alias.enabled or contact.block_forward:
        LOG.d("%s is disabled, do not forward", alias)
@ -770,7 +712,7 @@ def forward_email_to_mailbox(
    envelope,
    mailbox,
    user,
    reply_to_contact: Optional[Contact],
    reply_to_contacts: list[Contact],
) -> (bool, str):
    LOG.d("Forward %s -> %s -> %s", contact, alias, mailbox)

@ -818,7 +760,7 @@ def forward_email_to_mailbox(

    email_log = EmailLog.create(
        contact_id=contact.id,
        user_id=user.id,
        user_id=contact.user_id,
        mailbox_id=mailbox.id,
        alias_id=contact.alias_id,
        message_id=str(msg[headers.MESSAGE_ID]),
@ -953,11 +895,13 @@ def forward_email_to_mailbox(
        add_or_replace_header(msg, "From", new_from_header)
        LOG.d("From header, new:%s, old:%s", new_from_header, old_from_header)

    if reply_to_contact:
        reply_to_header = msg[headers.REPLY_TO]
        new_reply_to_header = reply_to_contact.new_addr()
    if len(reply_to_contacts) > 0:
        original_reply_to = get_header_unicode(msg[headers.REPLY_TO])
        new_reply_to_header = ", ".join(
            [reply_to_contact.new_addr() for reply_to_contact in reply_to_contacts][:5]
        )
        add_or_replace_header(msg, "Reply-To", new_reply_to_header)
        LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, reply_to_header)
        LOG.d("Reply-To header, new:%s, old:%s", new_reply_to_header, original_reply_to)

    # replace CC & To emails by reverse-alias for all emails that are not alias
    try:
@ -1089,7 +1033,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
        return False, status.E503

    user = alias.user
    mail_from = envelope.mail_from

    if not user.can_send_or_receive():
        LOG.i(f"User {user} cannot send emails")
@ -1103,13 +1046,15 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
        return False, dmarc_delivery_status

    # Anti-spoofing
    mailbox = get_mailbox_from_mail_from(mail_from, alias)
    mailbox = get_mailbox_for_reply_phase(
        envelope.mail_from, get_header_unicode(msg[headers.FROM]), alias
    )
    if not mailbox:
        if alias.disable_email_spoofing_check:
            # ignore this error, use default alias mailbox
            LOG.w(
                "ignore unknown sender to reverse-alias %s: %s -> %s",
                mail_from,
                envelope.mail_from,
                alias,
                contact,
            )
@ -1252,23 +1197,11 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):

    Session.commit()

    # make the email comes from alias
    from_header = alias.email
    # add alias name from alias
    if alias.name:
        LOG.d("Put alias name %s in from header", alias.name)
        from_header = sl_formataddr((alias.name, alias.email))
    elif alias.custom_domain:
        # add alias name from domain
        if alias.custom_domain.name:
            LOG.d(
                "Put domain default alias name %s in from header",
                alias.custom_domain.name,
            )
            from_header = sl_formataddr((alias.custom_domain.name, alias.email))

    LOG.d("From header is %s", from_header)
    add_or_replace_header(msg, headers.FROM, from_header)
    recipient_name = get_alias_recipient_name(alias)
    if recipient_name.message:
        LOG.d(recipient_name.message)
    LOG.d("From header is %s", recipient_name.name)
    add_or_replace_header(msg, headers.FROM, recipient_name.name)

    try:
        if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
@ -1460,32 +1393,6 @@ def replace_original_message_id(alias: Alias, email_log: EmailLog, msg: Message)
        msg[headers.REFERENCES] = " ".join(new_message_ids)


def get_mailbox_from_mail_from(mail_from: str, alias) -> Optional[Mailbox]:
    """return the corresponding mailbox given the mail_from and alias
    Usually the mail_from=mailbox.email but it can also be one of the authorized address
    """

    def __check(email_address: str, alias: Alias) -> Optional[Mailbox]:
        for mailbox in alias.mailboxes:
            if mailbox.email == email_address:
                return mailbox

            for authorized_address in mailbox.authorized_addresses:
                if authorized_address.email == email_address:
                    LOG.d(
                        "Found an authorized address for %s %s %s",
                        alias,
                        mailbox,
                        authorized_address,
                    )
                    return mailbox
        return None

    # We need to first check for the uncanonicalized version because we still have users in the db with the
    # email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
    return __check(mail_from, alias) or __check(canonicalize_email(mail_from), alias)


def handle_unknown_mailbox(
    envelope, msg, reply_email: str, user: User, alias: Alias, contact: Contact
):
@ -1601,7 +1508,9 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
        LOG.w(
            f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
        )
        change_alias_status(alias, enabled=False)
        change_alias_status(
            alias, enabled=False, message=f"Set enabled=False due to {reason}"
        )

        Notification.create(
            user_id=user.id,
@ -2,13 +2,14 @@ import argparse
from enum import Enum
from sys import argv, exit

from app.config import DB_URI
from app.config import EVENT_LISTENER_DB_URI
from app.log import LOG
from events import event_debugger
from events.runner import Runner
from events.event_source import DeadLetterEventSource, PostgresEventSource
from events.event_sink import ConsoleEventSink, HttpEventSink

_DEFAULT_MAX_RETRIES = 100
_DEFAULT_MAX_RETRIES = 10


class Mode(Enum):
@ -31,7 +32,7 @@ def main(mode: Mode, dry_run: bool, max_retries: int):
        source = DeadLetterEventSource(max_retries)
    elif mode == Mode.LISTENER:
        LOG.i("Using PostgresEventSource")
        source = PostgresEventSource(DB_URI)
        source = PostgresEventSource(EVENT_LISTENER_DB_URI)
    else:
        raise ValueError(f"Invalid mode: {mode}")

@ -46,32 +47,67 @@ def main(mode: Mode, dry_run: bool, max_retries: int):
    runner.run()


def debug_event(event_id: str):
    LOG.i(f"Debugging event {event_id}")
    try:
        event_id_int = int(event_id)
    except ValueError:
        raise ValueError(f"Invalid event id: {event_id}")
    event_debugger.debug_event(event_id_int)


def run_event(event_id: str, delete_on_success: bool):
    LOG.i(f"Running event {event_id}")
    try:
        event_id_int = int(event_id)
    except ValueError:
        raise ValueError(f"Invalid event id: {event_id}")
    event_debugger.run_event(event_id_int, delete_on_success)


def args():
    parser = argparse.ArgumentParser(description="Run event listener")
    parser.add_argument(
        "mode",
        help="Mode to run",
        choices=[Mode.DEAD_LETTER.value, Mode.LISTENER.value],
    subparsers = parser.add_subparsers(dest="command")

    listener_parser = subparsers.add_parser(Mode.LISTENER.value)
    listener_parser.add_argument(
        "--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
    )
    parser.add_argument(
        "max_retries",
        help="Max retries to consider an event as error and not try to process it again",
        type=int,
        nargs="?",
        default=_DEFAULT_MAX_RETRIES,
    listener_parser.add_argument("--dry-run", action="store_true")

    dead_letter_parser = subparsers.add_parser(Mode.DEAD_LETTER.value)
    dead_letter_parser.add_argument(
        "--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
    )
    parser.add_argument("--dry-run", help="Dry run mode", action="store_true")
    dead_letter_parser.add_argument("--dry-run", action="store_true")

    debug_parser = subparsers.add_parser("debug")
    debug_parser.add_argument("event_id", help="ID of the event to debug")

    run_parser = subparsers.add_parser("run")
    run_parser.add_argument("event_id", help="ID of the event to run")
    run_parser.add_argument("--delete-on-success", action="store_true")

    return parser.parse_args()


if __name__ == "__main__":
    if len(argv) < 2:
        print("Invalid usage. Pass 'listener' or 'dead_letter' as argument")
        print("Invalid usage. Pass a valid subcommand as argument")
        exit(1)

    args = args()
    main(
        mode=Mode.from_str(args.mode),
        dry_run=args.dry_run,
        max_retries=args.max_retries,
    )

    if args.command in [Mode.LISTENER.value, Mode.DEAD_LETTER.value]:
        main(
            mode=Mode.from_str(args.command),
            dry_run=args.dry_run,
            max_retries=args.max_retries,
        )
    elif args.command == "debug":
        debug_event(args.event_id)
    elif args.command == "run":
        run_event(args.event_id, args.delete_on_success)
    else:
        print("Invalid command")
        exit(1)
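With the flat positional arguments replaced by subparsers, the listener's command line splits into four subcommands; roughly the following invocations, sketched as comments since the entrypoint module name is an assumption:

# python event_listener.py listener --max-retries 10 --dry-run
# python event_listener.py dead_letter --max-retries 10
# python event_listener.py debug 123
# python event_listener.py run 123 --delete-on-success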
43 app/events/event_debugger.py Normal file
@ -0,0 +1,43 @@
from app.events.generated import event_pb2
from app.models import SyncEvent
from events.event_sink import HttpEventSink


def debug_event(event_id: int):
    event = SyncEvent.get_by(id=event_id)
    if not event:
        print("Event not found")
        return

    print(f"Info for event {event_id}")
    print(f"- Created at: {event.created_at}")
    print(f"- Updated at: {event.updated_at}")
    print(f"- Taken time: {event.taken_time}")
    print(f"- Retry count: {event.retry_count}")

    print()
    print("Event contents")
    event_contents = event.content
    parsed = event_pb2.Event.FromString(event_contents)

    print(f"- UserID: {parsed.user_id}")
    print(f"- ExternalUserID: {parsed.external_user_id}")
    print(f"- PartnerID: {parsed.partner_id}")

    content = parsed.content
    print(f"Content: {content}")


def run_event(event_id: int, delete_on_success: bool = True):
    event = SyncEvent.get_by(id=event_id)
    if not event:
        print("Event not found")
        return

    print(f"Processing event {event_id}")
    sink = HttpEventSink()
    res = sink.process(event)
    if res:
        print(f"Processed event {event_id}")
        if delete_on_success:
            SyncEvent.delete(event_id, commit=True)
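The same helpers can also be driven directly from a script, bypassing the CLI wrapper; a minimal sketch (event id 123 is illustrative):

from events import event_debugger

event_debugger.debug_event(123)  # print metadata plus the decoded protobuf payload
event_debugger.run_event(123, delete_on_success=False)  # re-send via HttpEventSink, keep the row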
@ -1,4 +1,5 @@
import requests
import newrelic.agent

from abc import ABC, abstractmethod
from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
@ -11,6 +12,10 @@ class EventSink(ABC):
    def process(self, event: SyncEvent) -> bool:
        pass

    @abstractmethod
    def send_data_to_webhook(self, data: bytes) -> bool:
        pass


class HttpEventSink(EventSink):
    def process(self, event: SyncEvent) -> bool:
@ -20,19 +25,28 @@ class HttpEventSink(EventSink):

        LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")

        if self.send_data_to_webhook(event.content):
            LOG.info(f"Event {event.id} sent successfully to webhook")
            return True

        return False

    def send_data_to_webhook(self, data: bytes) -> bool:
        res = requests.post(
            url=EVENT_WEBHOOK,
            data=event.content,
            data=data,
            headers={"Content-Type": "application/x-protobuf"},
            verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
        )
        newrelic.agent.record_custom_event(
            "EventSentToPartner", {"http_code": res.status_code}
        )
        if res.status_code != 200:
            LOG.warning(
                f"Failed to send event to webhook: {res.status_code} {res.text}"
            )
            return False
        else:
            LOG.info(f"Event {event.id} sent successfully to webhook")
            return True
@ -40,3 +54,7 @@ class ConsoleEventSink(EventSink):
    def process(self, event: SyncEvent) -> bool:
        LOG.info(f"Handling event {event.id}")
        return True

    def send_data_to_webhook(self, data: bytes) -> bool:
        LOG.info(f"Sending {len(data)} bytes to webhook")
        return True
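Because send_data_to_webhook is now part of the sink interface, a console sink can stand in for the HTTP sink during dry runs; a small sketch (the flag and payload bytes are illustrative):

from events.event_sink import ConsoleEventSink, HttpEventSink

dry_run = True  # illustrative flag
sink = ConsoleEventSink() if dry_run else HttpEventSink()
sink.send_data_to_webhook(b"\x08\x01")  # any protobuf-encoded payload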
@ -46,6 +46,7 @@ class PostgresEventSource(EventSource):
        cursor = self.__connection.cursor()
        cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")

        LOG.info("Starting to listen to events")
        while True:
            if select.select([self.__connection], [], [], 5) != ([], [], []):
                self.__connection.poll()
@ -71,7 +72,9 @@ class PostgresEventSource(EventSource):
            Session.close()  # Ensure we get a new connection and we don't leave a dangling tx

    def __connect(self):
        self.__connection = psycopg2.connect(self.__connection_string)
        self.__connection = psycopg2.connect(
            self.__connection_string, application_name="sl-event-listen"
        )

        from app.db import Session

@ -82,24 +85,28 @@ class DeadLetterEventSource(EventSource):
    def __init__(self, max_retries: int):
        self.__max_retries = max_retries

    def execute_loop(
        self, on_event: Callable[[SyncEvent], NoReturn]
    ) -> list[SyncEvent]:
        threshold = arrow.utcnow().shift(minutes=-_DEAD_LETTER_THRESHOLD_MINUTES)
        events = SyncEvent.get_dead_letter(
            older_than=threshold, max_retries=self.__max_retries
        )
        if events:
            LOG.info(f"Got {len(events)} dead letter events")
            newrelic.agent.record_custom_metric(
                "Custom/dead_letter_events_to_process", len(events)
            )
            for event in events:
                if event.mark_as_taken(allow_taken_older_than=threshold):
                    on_event(event)
        return events

    @newrelic.agent.background_task()
    def run(self, on_event: Callable[[SyncEvent], NoReturn]):
        while True:
            try:
                threshold = arrow.utcnow().shift(
                    minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
                )
                events = SyncEvent.get_dead_letter(
                    older_than=threshold, max_retries=self.__max_retries
                )
                if events:
                    LOG.info(f"Got {len(events)} dead letter events")
                if events:
                    newrelic.agent.record_custom_metric(
                        "Custom/dead_letter_events_to_process", len(events)
                    )
                    for event in events:
                        on_event(event)
                events = self.execute_loop(on_event)
                Session.close()  # Ensure that we have a new connection and we don't have a dangling tx with a lock
                if not events:
                    LOG.debug("No dead letter events")
@ -56,14 +56,15 @@ def add_sl_domains():
    Session.commit()


def add_proton_partner():
def add_proton_partner() -> Partner:
    proton_partner = Partner.get_by(name=PROTON_PARTNER_NAME)
    if not proton_partner:
        Partner.create(
        proton_partner = Partner.create(
            name=PROTON_PARTNER_NAME,
            contact_email="simplelogin@protonmail.com",
        )
        Session.commit()
    return proton_partner


if __name__ == "__main__":
@ -3,7 +3,7 @@ Run scheduled jobs.
Not meant for running job at precise time (+- 1h)
"""
import time
from typing import List
from typing import List, Optional

import arrow
from sqlalchemy.sql.expression import or_, and_
@ -14,11 +14,14 @@ from app.email_utils import (
    send_email,
    render,
)
from app.events.event_dispatcher import PostgresDispatcher
from app.import_utils import handle_batch_import
from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.jobs.export_user_data_job import ExportUserDataJob
from app.jobs.send_event_job import SendEventToWebhookJob
from app.log import LOG
from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from server import create_light_app


@ -127,7 +130,7 @@ def welcome_proton(user):

def delete_mailbox_job(job: Job):
    mailbox_id = job.payload.get("mailbox_id")
    mailbox = Mailbox.get(mailbox_id)
    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
    if not mailbox:
        return

@ -151,10 +154,18 @@ def delete_mailbox_job(job: Job):

    mailbox_email = mailbox.email
    user = mailbox.user

    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.DeleteMailbox,
        message=f"Delete mailbox {mailbox.id} ({mailbox.email})",
    )
    Mailbox.delete(mailbox_id)
    Session.commit()
    LOG.d("Mailbox %s %s deleted", mailbox_id, mailbox_email)

    if not job.payload.get("send_mail", True):
        return
    if alias_transferred_to:
        send_email(
            user.email,
@ -239,28 +250,41 @@ def process_job(job: Job):

    elif job.name == config.JOB_DELETE_DOMAIN:
        custom_domain_id = job.payload.get("custom_domain_id")
        custom_domain = CustomDomain.get(custom_domain_id)
        custom_domain: Optional[CustomDomain] = CustomDomain.get(custom_domain_id)
        if not custom_domain:
            return

        is_subdomain = custom_domain.is_sl_subdomain
        domain_name = custom_domain.domain
        user = custom_domain.user

        custom_domain_partner_id = custom_domain.partner_id
        CustomDomain.delete(custom_domain.id)
        Session.commit()

        if is_subdomain:
            message = f"Delete subdomain {custom_domain_id} ({domain_name})"
        else:
            message = f"Delete custom domain {custom_domain_id} ({domain_name})"
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.DeleteCustomDomain,
            message=message,
        )

        LOG.d("Domain %s deleted", domain_name)

        send_email(
            user.email,
            f"Your domain {domain_name} has been deleted",
            f"""Domain {domain_name} along with its aliases are deleted successfully.
        if custom_domain_partner_id is None:
            send_email(
                user.email,
                f"Your domain {domain_name} has been deleted",
                f"""Domain {domain_name} along with its aliases are deleted successfully.

Regards,
SimpleLogin team.
""",
            retries=3,
        )
Regards,
SimpleLogin team.
""",
                retries=3,
            )
    elif job.name == config.JOB_SEND_USER_REPORT:
        export_job = ExportUserDataJob.create_from_job(job)
        if export_job:
@ -276,7 +300,13 @@ SimpleLogin team.
        user = User.get(user_id)
        if user and user.activated:
            LOG.d(f"Sending alias creation events for {user}")
            send_alias_creation_events_for_user(user)
            send_alias_creation_events_for_user(
                user, dispatcher=PostgresDispatcher.get()
            )
    elif job.name == config.JOB_SEND_EVENT_TO_WEBHOOK:
        send_job = SendEventToWebhookJob.create_from_job(job)
        if send_job:
            send_job.run()
    else:
        LOG.e("Unknown job name %s", job.name)
@ -1,6 +1,4 @@
abacus
abdomen
abdominal
abide
abiding
ability
@ -1031,7 +1029,6 @@ chosen
chowder
chowtime
chrome
chubby
chuck
chug
chummy
@ -2041,8 +2038,6 @@ dwindling
dynamic
dynamite
dynasty
dyslexia
dyslexic
each
eagle
earache
@ -2081,7 +2076,6 @@ eatery
eating
eats
ebay
ebony
ebook
ecard
eccentric
@ -2375,8 +2369,6 @@ exclude
excluding
exclusion
exclusive
excretion
excretory
excursion
excusable
excusably
@ -2396,8 +2388,6 @@ existing
exit
exodus
exonerate
exorcism
exorcist
expand
expanse
expansion
@ -2483,7 +2473,6 @@ fanning
fantasize
fantastic
fantasy
fascism
fastball
faster
fasting
@ -3028,7 +3017,6 @@ guiding
guileless
guise
gulf
gullible
gully
gulp
gumball
@ -3040,10 +3028,6 @@ gurgle
gurgling
guru
gush
gusto
gusty
gutless
guts
gutter
guy
guzzler
@ -3242,8 +3226,6 @@ humble
humbling
humbly
humid
humiliate
humility
humming
hummus
humongous
@ -3271,7 +3253,6 @@ hurray
hurricane
hurried
hurry
hurt
husband
hush
husked
@ -3292,8 +3273,6 @@ hypnotic
hypnotism
hypnotist
hypnotize
hypocrisy
hypocrite
ibuprofen
ice
iciness
@ -3323,7 +3302,6 @@ image
imaginary
imagines
imaging
imbecile
imitate
imitation
immerse
@ -3746,7 +3724,6 @@ machine
machinist
magazine
magenta
maggot
magical
magician
magma
@ -3968,8 +3945,6 @@ multitude
mumble
mumbling
mumbo
mummified
mummify
mumps
munchkin
mundane
@ -4022,8 +3997,6 @@ napped
napping
nappy
narrow
nastily
nastiness
national
native
nativity
@ -4446,7 +4419,6 @@ pasta
pasted
pastel
pastime
pastor
pastrami
pasture
pasty
@ -4458,7 +4430,6 @@ path
patience
patient
patio
patriarch
patriot
patrol
patronage
@ -4549,7 +4520,6 @@ pettiness
petty
petunia
phantom
phobia
phoenix
phonebook
phoney
@ -4608,7 +4578,6 @@ plot
plow
ploy
pluck
plug
plunder
plunging
plural
@ -4875,7 +4844,6 @@ pupil
puppet
puppy
purchase
pureblood
purebred
purely
pureness
@ -5047,7 +5015,6 @@ recharger
recipient
recital
recite
reckless
reclaim
recliner
reclining
@ -5440,7 +5407,6 @@ rubdown
ruby
ruckus
rudder
rug
ruined
rule
rumble
@ -5448,7 +5414,6 @@ rumbling
rummage
rumor
runaround
rundown
runner
running
runny
@ -5518,7 +5483,6 @@ sandpaper
sandpit
sandstone
sandstorm
sandworm
sandy
sanitary
sanitizer
@ -5541,7 +5505,6 @@ satisfy
saturate
saturday
sauciness
saucy
sauna
savage
savanna
@ -5552,7 +5515,6 @@ savor
saxophone
say
scabbed
scabby
scalded
scalding
scale
@ -5587,7 +5549,6 @@ science
scientist
scion
scoff
scolding
scone
scoop
scooter
@ -5651,8 +5612,6 @@ sedate
sedation
sedative
sediment
seduce
seducing
segment
seismic
seizing
@ -5899,7 +5858,6 @@ skimpily
skincare
skinless
skinning
skinny
skintight
skipper
skipping
@ -6248,17 +6206,12 @@ stifle
stifling
stillness
stilt
stimulant
stimulate
stimuli
stimulus
stinger
stingily
stinging
stingray
stingy
stinking
stinky
stipend
stipulate
stir
@ -6866,7 +6819,6 @@ unbent
unbiased
unbitten
unblended
unblessed
unblock
unbolted
unbounded
@ -6947,7 +6899,6 @@ undertone
undertook
undertow
underuse
underwear
underwent
underwire
undesired
@ -7000,7 +6951,6 @@ unfunded
unglazed
ungloved
unglue
ungodly
ungraded
ungreased
unguarded
@ -7032,7 +6982,6 @@ uninsured
uninvited
union
uniquely
unisexual
unison
unissued
unit
@ -7493,8 +7442,6 @@ wheat
whenever
whiff
whimsical
whinny
whiny
whisking
whoever
whole
@ -7600,7 +7547,6 @@ wrongness
wrought
xbox
xerox
yahoo
yam
yanking
yapping
@ -0,0 +1,30 @@
"""Custom Domain partner id

Revision ID: 2441b7ff5da9
Revises: 1c14339aae90
Create Date: 2024-09-13 15:43:02.425964

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '2441b7ff5da9'
down_revision = '1c14339aae90'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('custom_domain', sa.Column('partner_id', sa.Integer(), nullable=True, default=None, server_default=None))
    op.create_foreign_key(None, 'custom_domain', 'partner', ['partner_id'], ['id'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'custom_domain', type_='foreignkey')
    op.drop_column('custom_domain', 'partner_id')
    # ### end Alembic commands ###
@ -0,0 +1,31 @@
"""contact.flags and custom_domain.pending_deletion

Revision ID: 88dd7a0abf54
Revises: 2441b7ff5da9
Create Date: 2024-09-19 15:41:20.910374

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '88dd7a0abf54'
down_revision = '2441b7ff5da9'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('contact', sa.Column('flags', sa.Integer(), server_default='0', nullable=False))
    op.add_column('custom_domain', sa.Column('pending_deletion', sa.Boolean(), server_default='0', nullable=False))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('custom_domain', 'pending_deletion')
    op.drop_column('contact', 'flags')
    # ### end Alembic commands ###
@ -0,0 +1,27 @@
"""custom domain indices

Revision ID: 62afa3a10010
Revises: 88dd7a0abf54
Create Date: 2024-09-30 11:40:04.127791

"""
from alembic import op


# revision identifiers, used by Alembic.
revision = '62afa3a10010'
down_revision = '88dd7a0abf54'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_custom_domain_pending_deletion', 'custom_domain', ['pending_deletion'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_custom_domain_user_id', 'custom_domain', ['user_id'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_custom_domain_user_id', table_name='custom_domain', postgresql_concurrently=True)
        op.drop_index('ix_custom_domain_pending_deletion', table_name='custom_domain', postgresql_concurrently=True)
@ -0,0 +1,45 @@
"""alias_audit_log

Revision ID: 91ed7f46dc81
Revises: 62afa3a10010
Create Date: 2024-10-11 13:22:11.594054

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '91ed7f46dc81'
down_revision = '62afa3a10010'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('alias_audit_log',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
    sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('alias_id', sa.Integer(), nullable=False),
    sa.Column('alias_email', sa.String(length=255), nullable=False),
    sa.Column('action', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_alias_audit_log_alias_email', 'alias_audit_log', ['alias_email'], unique=False)
    op.create_index('ix_alias_audit_log_alias_id', 'alias_audit_log', ['alias_id'], unique=False)
    op.create_index('ix_alias_audit_log_user_id', 'alias_audit_log', ['user_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_alias_audit_log_user_id', table_name='alias_audit_log')
    op.drop_index('ix_alias_audit_log_alias_id', table_name='alias_audit_log')
    op.drop_index('ix_alias_audit_log_alias_email', table_name='alias_audit_log')
    op.drop_table('alias_audit_log')
    # ### end Alembic commands ###
@ -0,0 +1,44 @@
"""user_audit_log

Revision ID: 7d7b84779837
Revises: 91ed7f46dc81
Create Date: 2024-10-16 11:52:49.128644

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '7d7b84779837'
down_revision = '91ed7f46dc81'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user_audit_log',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
    sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('user_email', sa.String(length=255), nullable=False),
    sa.Column('action', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_user_audit_log_user_email', 'user_audit_log', ['user_email'], unique=False)
    op.create_index('ix_user_audit_log_user_id', 'user_audit_log', ['user_id'], unique=False)
    op.create_index('ix_user_audit_log_created_at', 'user_audit_log', ['created_at'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_user_audit_log_user_id', table_name='user_audit_log')
    op.drop_index('ix_user_audit_log_user_email', table_name='user_audit_log')
    op.drop_index('ix_user_audit_log_created_at', table_name='user_audit_log')
    op.drop_table('user_audit_log')
    # ### end Alembic commands ###
@@ -0,0 +1,27 @@
"""alias_audit_log_index_created_at

Revision ID: 32f25cbf12f6
Revises: 7d7b84779837
Create Date: 2024-10-16 16:45:36.827161

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '32f25cbf12f6'
down_revision = '7d7b84779837'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_alias_audit_log_created_at', 'alias_audit_log', ['created_at'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_alias_audit_log_created_at', table_name='alias_audit_log', postgresql_concurrently=True)
@@ -0,0 +1,28 @@
"""Preserve user id on alias delete

Revision ID: 4882cc49dde9
Revises: 32f25cbf12f6
Create Date: 2024-11-06 10:10:40.235991

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4882cc49dde9'
down_revision = '32f25cbf12f6'
branch_labels = None
depends_on = None


def upgrade():
    op.add_column('deleted_alias', sa.Column('user_id', sa.Integer(), server_default=None, nullable=True))
    with op.get_context().autocommit_block():
        op.create_index('ix_deleted_alias_user_id_created_at', 'deleted_alias', ['user_id', 'created_at'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_deleted_alias_user_id_created_at', table_name='deleted_alias')
    op.drop_column('deleted_alias', 'user_id')
@@ -0,0 +1,28 @@
"""Revert user id on deleted alias

Revision ID: bc9aa210efa3
Revises: 4882cc49dde9
Create Date: 2024-11-06 12:44:44.129691

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'bc9aa210efa3'
down_revision = '4882cc49dde9'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_deleted_alias_user_id_created_at', table_name='deleted_alias')
    op.drop_column('deleted_alias', 'user_id')


def downgrade():
    op.add_column('deleted_alias', sa.Column('user_id', sa.Integer(), server_default=None, nullable=True))
    with op.get_context().autocommit_block():
        op.create_index('ix_deleted_alias_user_id_created_at', 'deleted_alias', ['user_id', 'created_at'], unique=False, postgresql_concurrently=True)
@@ -0,0 +1,30 @@
"""add missing indices on user and mailbox

Revision ID: 842ac670096e
Revises: bc9aa210efa3
Create Date: 2024-11-13 15:55:28.798506

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '842ac670096e'
down_revision = 'bc9aa210efa3'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_mailbox_pgp_finger_print', 'mailbox', ['pgp_finger_print'], unique=False)
        op.create_index('ix_users_default_mailbox_id', 'users', ['default_mailbox_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_users_default_mailbox_id', table_name='users')
        op.drop_index('ix_mailbox_pgp_finger_print', table_name='mailbox')
@@ -0,0 +1,29 @@
"""add missing indices on email log

Revision ID: 12274da2299f
Revises: 842ac670096e
Create Date: 2024-11-14 10:27:20.371191

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '12274da2299f'
down_revision = '842ac670096e'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_email_log_bounced_mailbox_id', 'email_log', ['bounced_mailbox_id'], unique=False)
        op.create_index('ix_email_log_mailbox_id', 'email_log', ['mailbox_id'], unique=False)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_email_log_mailbox_id', table_name='email_log')
        op.drop_index('ix_email_log_bounced_mailbox_id', table_name='email_log')
@@ -0,0 +1,102 @@
"""add missing indices for fk constraints

Revision ID: 0f3ee15b0014
Revises: 12274da2299f
Create Date: 2024-11-15 12:29:10.739938

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0f3ee15b0014'
down_revision = '12274da2299f'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_activation_code_user_id', 'activation_code', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_alias_original_owner_id', 'alias', ['original_owner_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_alias_used_on_user_id', 'alias_used_on', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_to_cookie_token_api_key_id', 'api_cookie_token', ['api_key_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_to_cookie_token_user_id', 'api_cookie_token', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_key_code', 'api_key', ['code'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_key_user_id', 'api_key', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_authorization_code_client_id', 'authorization_code', ['client_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_authorization_code_user_id', 'authorization_code', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_authorized_address_user_id', 'authorized_address', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_auto_create_rule_custom_domain_id', 'auto_create_rule', ['custom_domain_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_batch_import_file_id', 'batch_import', ['file_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_batch_import_user_id', 'batch_import', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_client_icon_id', 'client', ['icon_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_client_referral_id', 'client', ['referral_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_client_user_id', 'client', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_coupon_used_by_user_id', 'coupon', ['used_by_user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_directory_user_id', 'directory', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_domain_deleted_alias_user_id', 'domain_deleted_alias', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_email_log_refused_email_id', 'email_log', ['refused_email_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_fido_user_id', 'fido', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_file_user_id', 'file', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_hibp_notified_alias_user_id', 'hibp_notified_alias', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_mfa_browser_user_id', 'mfa_browser', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_newsletter_user_user_id', 'newsletter_user', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_oauth_token_client_id', 'oauth_token', ['client_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_oauth_token_user_id', 'oauth_token', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_payout_user_id', 'payout', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_phone_reservation_user_id', 'phone_reservation', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_provider_complaint_refused_email_id', 'provider_complaint', ['refused_email_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_provider_complaint_user_id', 'provider_complaint', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_redirect_uri_client_id', 'redirect_uri', ['client_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_referral_user_id', 'referral', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_refused_email_user_id', 'refused_email', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_reset_password_code_user_id', 'reset_password_code', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_sent_alert_user_id', 'sent_alert', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_users_default_alias_custom_domain_id', 'users', ['default_alias_custom_domain_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_users_profile_picture_id', 'users', ['profile_picture_id'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_users_profile_picture_id', table_name='users')
        op.drop_index('ix_users_default_alias_custom_domain_id', table_name='users')
        op.drop_index('ix_sent_alert_user_id', table_name='sent_alert')
        op.drop_index('ix_reset_password_code_user_id', table_name='reset_password_code')
        op.drop_index('ix_refused_email_user_id', table_name='refused_email')
        op.drop_index('ix_referral_user_id', table_name='referral')
        op.drop_index('ix_redirect_uri_client_id', table_name='redirect_uri')
        op.drop_index('ix_provider_complaint_user_id', table_name='provider_complaint')
        op.drop_index('ix_provider_complaint_refused_email_id', table_name='provider_complaint')
        op.drop_index('ix_phone_reservation_user_id', table_name='phone_reservation')
        op.drop_index('ix_payout_user_id', table_name='payout')
        op.drop_index('ix_oauth_token_user_id', table_name='oauth_token')
        op.drop_index('ix_oauth_token_client_id', table_name='oauth_token')
        op.drop_index('ix_newsletter_user_user_id', table_name='newsletter_user')
        op.drop_index('ix_mfa_browser_user_id', table_name='mfa_browser')
        op.drop_index('ix_hibp_notified_alias_user_id', table_name='hibp_notified_alias')
        op.drop_index('ix_file_user_id', table_name='file')
        op.drop_index('ix_fido_user_id', table_name='fido')
        op.drop_index('ix_email_log_refused_email_id', table_name='email_log')
        op.drop_index('ix_domain_deleted_alias_user_id', table_name='domain_deleted_alias')
        op.drop_index('ix_directory_user_id', table_name='directory')
        op.drop_index('ix_coupon_used_by_user_id', table_name='coupon')
        op.drop_index('ix_client_user_id', table_name='client')
        op.drop_index('ix_client_referral_id', table_name='client')
        op.drop_index('ix_client_icon_id', table_name='client')
        op.drop_index('ix_batch_import_user_id', table_name='batch_import')
        op.drop_index('ix_batch_import_file_id', table_name='batch_import')
        op.drop_index('ix_auto_create_rule_custom_domain_id', table_name='auto_create_rule')
        op.drop_index('ix_authorized_address_user_id', table_name='authorized_address')
        op.drop_index('ix_authorization_code_user_id', table_name='authorization_code')
        op.drop_index('ix_authorization_code_client_id', table_name='authorization_code')
        op.drop_index('ix_api_key_user_id', table_name='api_key')
        op.drop_index('ix_api_key_code', table_name='api_key')
        op.drop_index('ix_api_to_cookie_token_user_id', table_name='api_cookie_token')
        op.drop_index('ix_api_to_cookie_token_api_key_id', table_name='api_cookie_token')
        op.drop_index('ix_alias_used_on_user_id', table_name='alias_used_on')
        op.drop_index('ix_alias_original_owner_id', table_name='alias')
        op.drop_index('ix_activation_code_user_id', table_name='activation_code')
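One asymmetry worth noting in the migration above: every index is created with postgresql_concurrently=True, but the downgrade drops them inside the autocommit block without that flag, so each DROP INDEX takes an exclusive lock on its table. If a lock-free rollback mattered, each drop would need the same flag, e.g. (a sketch reusing an index name from this migration):

    with op.get_context().autocommit_block():
        # DROP INDEX CONCURRENTLY also requires running outside a transaction
        op.drop_index('ix_users_profile_picture_id', table_name='users', postgresql_concurrently=True)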
35
app/migrations/versions/2024_112619_085f77996ce3_.py
Normal file
@@ -0,0 +1,35 @@
"""empty message

Revision ID: 085f77996ce3
Revises: 0f3ee15b0014
Create Date: 2024-11-26 19:20:32.227899

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '085f77996ce3'
down_revision = '0f3ee15b0014'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('partner_subscription', sa.Column('lifetime', sa.Boolean(), server_default='0', nullable=False))
    op.alter_column('partner_subscription', 'end_at',
               existing_type=postgresql.TIMESTAMP(),
               nullable=True)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('partner_subscription', 'end_at',
               existing_type=postgresql.TIMESTAMP(),
               nullable=False)
    op.drop_column('partner_subscription', 'lifetime')
    # ### end Alembic commands ###
@@ -94,6 +94,20 @@ def log_nb_db_connection():
    newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)


@newrelic.agent.background_task()
def log_nb_db_connection_by_app_name():
    # get the number of connections to the DB
    rows = Session.execute(
        "SELECT application_name, count(datid) FROM pg_stat_activity group by application_name"
    )
    for row in rows:
        if row[0].find("sl-") == 0:
            LOG.d("number of db connections for app %s = %s", row[0], row[1])
            newrelic.agent.record_custom_metric(
                f"Custom/nb_db_app_connection/{row[0]}", row[1]
            )


@newrelic.agent.background_task()
def log_pending_to_process_events():
    r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
@@ -125,6 +139,21 @@ def log_events_pending_dead_letter():
    )


@newrelic.agent.background_task()
def log_failed_events():
    r = Session.execute(
        """
        SELECT COUNT(*)
        FROM sync_event
        WHERE retry_count >= 10;
        """,
    )
    failed_events = list(r)[0][0]

    LOG.d("number of failed events %s", failed_events)
    newrelic.agent.record_custom_metric("Custom/sync_events_failed", failed_events)


if __name__ == "__main__":
    exporter = MetricExporter(get_newrelic_license())
    while True:
@@ -132,6 +161,8 @@ if __name__ == "__main__":
        log_nb_db_connection()
        log_pending_to_process_events()
        log_events_pending_dead_letter()
        log_failed_events()
        log_nb_db_connection_by_app_name()
        Session.close()

        exporter.run()
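The functions added in this diff all follow one pattern: run a COUNT query, log the value, and record it as a New Relic custom metric from a decorated background task. A minimal sketch of that pattern (the function name, query, and metric name below are illustrative, not from the diff; Session, LOG, and newrelic are the same objects the script already imports):

    @newrelic.agent.background_task()
    def log_example_count():
        # count matching rows and ship the number to New Relic
        r = Session.execute("SELECT count(*) FROM sync_event")
        example_count = list(r)[0][0]
        LOG.d("example count %s", example_count)
        newrelic.agent.record_custom_metric("Custom/example_count", example_count)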
49
app/oneshot/alias_partner_set_flag_and_clear_note.py
Normal file
@@ -0,0 +1,49 @@
#!/usr/bin/env python3
import argparse
import time

from sqlalchemy import func
from app.models import Alias
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Update alias notes and backfill flag"
)
parser.add_argument(
    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")

args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
    max_alias_id = Session.query(func.max(Alias.id)).scalar()

print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 10000
note_sql = "(note = 'Created through Proton' or note = 'Created through partner Proton')"
alias_query = f"UPDATE alias set note = NULL, flags = flags | :flag where id>=:start AND id<:end and {note_sql}"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
    rows_done = Session.execute(
        alias_query,
        {
            "start": batch_start,
            "end": batch_start + step,
            "flag": Alias.FLAG_PARTNER_CREATED,
        },
    )
    updated += rows_done.rowcount
    Session.commit()
    elapsed = time.time() - start_time
    last_batch_id = batch_start + step
    time_per_alias = elapsed / last_batch_id
    remaining = max_alias_id - last_batch_id
    time_remaining = remaining * time_per_alias  # seconds left at the current rate
    mins_remaining = time_remaining / 60.0
    print(
        f"\rAlias {batch_start}/{max_alias_id} {updated} {mins_remaining:.2f} mins remaining"
    )
print("")
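For a sense of the progress line above: if the first 10,000 aliases take 600 seconds, time_per_alias is 0.06 s, so 90,000 remaining aliases work out to 5,400 s, or 90 minutes. A hypothetical invocation (the id range here is an assumption, not from the diff):

    python oneshot/alias_partner_set_flag_and_clear_note.py --start_alias_id 0 --end_alias_id 500000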
62
app/oneshot/send_lifetime_user_events.py
Normal file
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
import argparse
import time

import arrow
from sqlalchemy import func

from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserPlanChanged, EventContent
from app.models import PartnerUser, User
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Send lifetime users to proton"
)
parser.add_argument(
    "-s", "--start_pu_id", default=0, type=int, help="Initial partner_user_id"
)
parser.add_argument(
    "-e", "--end_pu_id", default=0, type=int, help="Last partner_user_id"
)

args = parser.parse_args()
pu_id_start = args.start_pu_id
max_pu_id = args.end_pu_id
if max_pu_id == 0:
    max_pu_id = Session.query(func.max(PartnerUser.id)).scalar()

print(f"Checking partner user {pu_id_start} to {max_pu_id}")
step = 1000
done = 0
start_time = time.time()
with_lifetime = 0
for batch_start in range(pu_id_start, max_pu_id, step):
    users = (
        Session.query(User)
        .join(PartnerUser, PartnerUser.user_id == User.id)
        .filter(
            PartnerUser.id >= batch_start,
            PartnerUser.id < batch_start + step,
            User.lifetime == True,  # noqa: E712
        )
    ).all()
    for user in users:
        # Just in case the == True cond is wonky
        if not user.lifetime:
            continue
        with_lifetime += 1
        event = UserPlanChanged(plan_end_time=arrow.get("2038-01-01").timestamp)
        EventDispatcher.send_event(user, EventContent(user_plan_change=event))
        Session.flush()
    Session.commit()
    elapsed = time.time() - start_time
    last_batch_id = batch_start + step
    time_per_user = elapsed / last_batch_id
    remaining = max_pu_id - last_batch_id
    time_remaining = remaining * time_per_user  # seconds left at the current rate
    mins_remaining = time_remaining / 60.0
    print(
        f"\rPartnerUser {batch_start}/{max_pu_id} {with_lifetime} {mins_remaining:.2f} mins remaining"
    )
print(f"With SL lifetime {with_lifetime}")
57
app/oneshot/send_plan_change_events.py
Normal file
@@ -0,0 +1,57 @@
#!/usr/bin/env python3
import argparse
import time

from sqlalchemy import func

from app.account_linking import send_user_plan_changed_event
from app.models import PartnerUser
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Send plan change events to partner users"
)
parser.add_argument(
    "-s", "--start_pu_id", default=0, type=int, help="Initial partner_user_id"
)
parser.add_argument(
    "-e", "--end_pu_id", default=0, type=int, help="Last partner_user_id"
)

args = parser.parse_args()
pu_id_start = args.start_pu_id
max_pu_id = args.end_pu_id
if max_pu_id == 0:
    max_pu_id = Session.query(func.max(PartnerUser.id)).scalar()

print(f"Checking partner user {pu_id_start} to {max_pu_id}")
step = 100
updated = 0
start_time = time.time()
with_premium = 0
with_lifetime = 0
for batch_start in range(pu_id_start, max_pu_id, step):
    partner_users = (
        Session.query(PartnerUser).filter(
            PartnerUser.id >= batch_start, PartnerUser.id < batch_start + step
        )
    ).all()
    for partner_user in partner_users:
        event = send_user_plan_changed_event(partner_user)
        if event is not None:
            if event.lifetime:
                with_lifetime += 1
            else:
                with_premium += 1
            updated += 1
    Session.commit()
    elapsed = time.time() - start_time
    last_batch_id = batch_start + step
    time_per_user = elapsed / last_batch_id
    remaining = max_pu_id - last_batch_id
    time_remaining = remaining * time_per_user  # seconds left at the current rate
    mins_remaining = time_remaining / 60.0
    print(
        f"\rPartnerUser {batch_start}/{max_pu_id} {updated} {mins_remaining:.2f} mins remaining"
    )
print(f"With SL premium {with_premium} lifetime {with_lifetime}")
476
app/poetry.lock
generated
@@ -276,21 +276,6 @@ files = [
    {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
]

[[package]]
name = "backports.entry-points-selectable"
version = "1.1.1"
description = "Compatibility shim providing selectable entry points for older implementations"
optional = false
python-versions = ">=2.7"
files = [
    {file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"},
    {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"},
]

[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]

[[package]]
name = "bcrypt"
version = "3.2.0"
@@ -375,35 +360,41 @@ files = [

[[package]]
name = "boto3"
version = "1.15.9"
version = "1.35.37"
description = "The AWS SDK for Python"
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
    {file = "boto3-1.15.9-py2.py3-none-any.whl", hash = "sha256:e0a1dbc0a0e460dc6de2f4144b5015edad3ab5c17ee83c6194b1a010d815bc60"},
    {file = "boto3-1.15.9.tar.gz", hash = "sha256:02f5f7a2b1349760b030c34f90a9cb4600bf8fe3cbc76b801d122bc4cecf3a7f"},
    {file = "boto3-1.35.37-py3-none-any.whl", hash = "sha256:385ca77bf8ea4ab2d97f6e2435bdb29f77d9301e2f7ac796c2f465753c2adf3c"},
    {file = "boto3-1.35.37.tar.gz", hash = "sha256:470d981583885859fed2fd1c185eeb01cc03e60272d499bafe41b12625b158c8"},
]

[package.dependencies]
botocore = ">=1.18.9,<1.19.0"
jmespath = ">=0.7.1,<1.0.0"
s3transfer = ">=0.3.0,<0.4.0"
botocore = ">=1.35.37,<1.36.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"

[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]

[[package]]
name = "botocore"
version = "1.18.9"
version = "1.35.37"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
    {file = "botocore-1.18.9-py2.py3-none-any.whl", hash = "sha256:dc3244170254cbba7dfde00b0489f830069d93dd6a9e555178d989072d7ee7c2"},
    {file = "botocore-1.18.9.tar.gz", hash = "sha256:35b06b8801eb2dd7e708de35581f9c0304740645874f3af5b8b0c1648f8d6365"},
    {file = "botocore-1.35.37-py3-none-any.whl", hash = "sha256:64f965d4ba7adb8d79ce044c3aef7356e05dd74753cf7e9115b80f477845d920"},
    {file = "botocore-1.35.37.tar.gz", hash = "sha256:b2b4d29bafd95b698344f2f0577bb67064adbf1735d8a0e3c7473daa59c23ba6"},
]

[package.dependencies]
jmespath = ">=0.7.1,<1.0.0"
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
urllib3 = {version = ">=1.20,<1.26", markers = "python_version != \"3.4\""}
urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}

[package.extras]
crt = ["awscrt (==0.22.0)"]

[[package]]
name = "cachetools"
@@ -491,13 +482,13 @@ pycparser = "*"

[[package]]
name = "cfgv"
version = "3.2.0"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.6.1"
python-versions = ">=3.8"
files = [
    {file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"},
    {file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"},
    {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
    {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]

[[package]]
@@ -690,6 +681,21 @@ sdist = ["setuptools-rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]

[[package]]
name = "cssbeautifier"
version = "1.15.1"
description = "CSS unobfuscator and beautifier."
optional = false
python-versions = "*"
files = [
    {file = "cssbeautifier-1.15.1.tar.gz", hash = "sha256:9f7064362aedd559c55eeecf6b6bed65e05f33488dcbe39044f0403c26e1c006"},
]

[package.dependencies]
editorconfig = ">=0.12.2"
jsbeautifier = "*"
six = ">=1.13.0"

[[package]]
name = "decorator"
version = "4.4.2"
@@ -734,41 +740,40 @@ graph = ["objgraph (>=1.7.2)"]

[[package]]
name = "distlib"
version = "0.3.1"
version = "0.3.8"
description = "Distribution utilities"
optional = false
python-versions = "*"
files = [
    {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"},
    {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"},
    {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
    {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
]

[[package]]
name = "djlint"
version = "1.3.0"
version = "1.34.1"
description = "HTML Template Linter and Formatter"
optional = false
python-versions = ">=3.7,<4.0"
python-versions = ">=3.8.0,<4.0.0"
files = [
    {file = "djlint-1.3.0-py3-none-any.whl", hash = "sha256:0c986bf542cdac3025d431a5b15e6c3977f652f2e76e408dbb5e7aaab6b73d99"},
    {file = "djlint-1.3.0.tar.gz", hash = "sha256:b2d8e6c0a14f88da165296f0da05795d15299b7ab0a9093d670ce9ffd867bc79"},
    {file = "djlint-1.34.1-py3-none-any.whl", hash = "sha256:96ff1c464fb6f061130ebc88663a2ea524d7ec51f4b56221a2b3f0320a3cfce8"},
    {file = "djlint-1.34.1.tar.gz", hash = "sha256:db93fa008d19eaadb0454edf1704931d14469d48508daba2df9941111f408346"},
]

[package.dependencies]
click = ">=8.0.1,<9.0.0"
colorama = ">=0.4.4,<0.5.0"
cssbeautifier = ">=1.14.4,<2.0.0"
html-tag-names = ">=0.1.2,<0.2.0"
html-void-elements = ">=0.1.0,<0.2.0"
importlib-metadata = ">=4.11.0,<5.0.0"
pathspec = ">=0.9.0,<0.10.0"
jsbeautifier = ">=1.14.4,<2.0.0"
json5 = ">=0.9.11,<0.10.0"
pathspec = ">=0.12.0,<0.13.0"
PyYAML = ">=6.0,<7.0"
regex = ">=2022.1.18,<2023.0.0"
regex = ">=2023.0.0,<2024.0.0"
tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""}
tqdm = ">=4.62.2,<5.0.0"

[package.extras]
test = ["coverage (>=6.3.1,<7.0.0)", "pytest (>=7.0.1,<8.0.0)", "pytest-cov (>=3.0.0,<4.0.0)"]

[[package]]
name = "dkimpy"
version = "1.0.5"
@@ -806,6 +811,16 @@ doh = ["requests", "requests-toolbelt"]
idna = ["idna (>=2.1)"]
trio = ["sniffio (>=1.1)", "trio (>=0.14.0)"]

[[package]]
name = "editorconfig"
version = "0.12.4"
description = "EditorConfig File Locator and Interpreter for Python"
optional = false
python-versions = "*"
files = [
    {file = "EditorConfig-0.12.4.tar.gz", hash = "sha256:24857fa1793917dd9ccf0c7810a07e05404ce9b823521c7dce22a4fb5d125f80"},
]

[[package]]
name = "email-validator"
version = "1.1.3"
@@ -851,15 +866,20 @@ requests = "*"

[[package]]
name = "filelock"
version = "3.0.12"
version = "3.15.4"
description = "A platform independent file lock."
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
    {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"},
    {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"},
    {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
    {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
]

[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"]

[[package]]
name = "flanker"
version = "0.9.11"
@@ -1495,17 +1515,17 @@ pyreadline = {version = "*", markers = "sys_platform == \"win32\""}

[[package]]
name = "identify"
version = "1.5.5"
version = "2.6.0"
description = "File identification library for Python"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
python-versions = ">=3.8"
files = [
    {file = "identify-1.5.5-py2.py3-none-any.whl", hash = "sha256:da683bfb7669fa749fc7731f378229e2dbf29a1d1337cbde04106f02236eb29d"},
    {file = "identify-1.5.5.tar.gz", hash = "sha256:7c22c384a2c9b32c5cc891d13f923f6b2653aa83e2d75d8f79be240d6c86c4f4"},
    {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"},
    {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"},
]

[package.extras]
license = ["editdistance"]
license = ["ukkonen"]

[[package]]
name = "idna"
@@ -1518,25 +1538,6 @@ files = [
    {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
]

[[package]]
name = "importlib-metadata"
version = "4.12.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.7"
files = [
    {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
    {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
]

[package.dependencies]
zipp = ">=0.5"

[package.extras]
docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]

[[package]]
name = "iniconfig"
version = "1.0.1"
@@ -1669,6 +1670,31 @@ files = [
    {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"},
]

[[package]]
name = "jsbeautifier"
version = "1.15.1"
description = "JavaScript unobfuscator and beautifier."
optional = false
python-versions = "*"
files = [
    {file = "jsbeautifier-1.15.1.tar.gz", hash = "sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24"},
]

[package.dependencies]
editorconfig = ">=0.12.2"
six = ">=1.13.0"

[[package]]
name = "json5"
version = "0.9.25"
description = "A Python implementation of the JSON5 data format."
optional = false
python-versions = ">=3.8"
files = [
    {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"},
    {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"},
]

[[package]]
name = "jwcrypto"
version = "0.8"
@@ -1959,13 +1985,13 @@ urllib3 = ">=1.7,<2"

[[package]]
name = "nodeenv"
version = "1.5.0"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
python-versions = "*"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
    {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"},
    {file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"},
    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]

[[package]]
@@ -2015,13 +2041,13 @@ testing = ["docopt", "pytest (>=3.0.7)"]

[[package]]
name = "pathspec"
version = "0.9.0"
version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
python-versions = ">=3.8"
files = [
    {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
    {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]

[[package]]
@@ -2117,13 +2143,13 @@ files = [

[[package]]
name = "pre-commit"
version = "2.17.0"
version = "3.8.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.6.1"
python-versions = ">=3.9"
files = [
    {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"},
    {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"},
    {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"},
    {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"},
]

[package.dependencies]
@@ -2131,8 +2157,7 @@ cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
toml = "*"
virtualenv = ">=20.0.8"
virtualenv = ">=20.10.0"

[[package]]
name = "prompt-toolkit"
@@ -2665,85 +2690,104 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"

[[package]]
name = "regex"
version = "2022.6.2"
version = "2023.12.25"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
    {file = "regex-2022.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:042d122f9fee3ceb6d7e3067d56557df697d1aad4ff5f64ecce4dc13a90a7c01"},
    {file = "regex-2022.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffef4b30785dc2d1604dfb7cf9fca5dc27cd86d65f7c2a9ec34d6d3ae4565ec2"},
    {file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0afa6a601acf3c0dc6de4e8d7d8bbce4e82f8542df746226cd35d4a6c15e9456"},
    {file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a11cbe8eb5fb332ae474895b5ead99392a4ea568bd2a258ab8df883e9c2bf92"},
    {file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c1f62ee2ba880e221bc950651a1a4b0176083d70a066c83a50ef0cb9b178e12"},
    {file = "regex-2022.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aba3d13c77173e9bfed2c2cea7fc319f11c89a36fcec08755e8fb169cf3b0df"},
    {file = "regex-2022.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249437f7f5b233792234aeeecb14b0aab1566280de42dfc97c26e6f718297d68"},
    {file = "regex-2022.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:179410c79fa86ef318d58ace233f95b87b05a1db6dc493fa29404a43f4b215e2"},
    {file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5e201b1232d81ca1a7a22ab2f08e1eccad4e111579fd7f3bbf60b21ef4a16cea"},
    {file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fdecb225d0f1d50d4b26ac423e0032e76d46a788b83b4e299a520717a47d968c"},
    {file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:be57f9c7b0b423c66c266a26ad143b2c5514997c05dd32ce7ca95c8b209c2288"},
    {file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ed657a07d8a47ef447224ea00478f1c7095065dfe70a89e7280e5f50a5725131"},
    {file = "regex-2022.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:24908aefed23dd065b4a668c0b4ca04d56b7f09d8c8e89636cf6c24e64e67a1e"},
    {file = "regex-2022.6.2-cp310-cp310-win32.whl", hash = "sha256:775694cd0bb2c4accf2f1cdd007381b33ec8b59842736fe61bdbad45f2ac7427"},
    {file = "regex-2022.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:809bbbbbcf8258049b031d80932ba71627d2274029386f0452e9950bcfa2c6e8"},
    {file = "regex-2022.6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2b5d983eb0adf2049d41f95205bdc3de4e6cc2350e9c80d4409d3a75229de"},
    {file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4c101746a8dac0401abefa716b357c546e61ea2e3d4a564a9db9eac57ccbce"},
    {file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:166ae7674d0a0e0f8044e7335ba86d0716c9d49465cff1b153f908e0470b8300"},
    {file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5eac5d8a8ac9ccf00805d02a968a36f5c967db6c7d2b747ab9ed782b3b3a28b"},
    {file = "regex-2022.6.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57823f35b18d82b201c1b27ce4e55f88e79e81d9ca07b50ce625d33823e1439"},
    {file = "regex-2022.6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d42e3b7b23473729adbf76103e7df75f9167a5a80b1257ca30688352b4bb2dc"},
    {file = "regex-2022.6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2932e728bee0a634fe55ee54d598054a5a9ffe4cd2be21ba2b4b8e5f8064c2c"},
    {file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:17764683ea01c2b8f103d99ae9de2473a74340df13ce306c49a721f0b1f0eb9e"},
    {file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:2ac29b834100d2c171085ceba0d4a1e7046c434ddffc1434dbc7f9d59af1e945"},
    {file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:f43522fb5d676c99282ca4e2d41e8e2388427c0cf703db6b4a66e49b10b699a8"},
    {file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:9faa01818dad9111dbf2af26c6e3c45140ccbd1192c3a0981f196255bf7ec5e6"},
    {file = "regex-2022.6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:17443f99b8f255273731f915fdbfea4d78d809bb9c3aaf67b889039825d06515"},
    {file = "regex-2022.6.2-cp36-cp36m-win32.whl", hash = "sha256:4a5449adef907919d4ce7a1eab2e27d0211d1b255bf0b8f5dd330ad8707e0fc3"},
    {file = "regex-2022.6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4d206703a96a39763b5b45cf42645776f5553768ea7f3c2c1a39a4f59cafd4ba"},
    {file = "regex-2022.6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fcd7c432202bcb8b642c3f43d5bcafc5930d82fe5b2bf2c008162df258445c1d"},
    {file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:186c5a4a4c40621f64d771038ede20fca6c61a9faa8178f9e305aaa0c2442a97"},
    {file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:047b2d1323a51190c01b6604f49fe09682a5c85d3c1b2c8b67c1cd68419ce3c4"},
    {file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30637e7fa4acfed444525b1ab9683f714be617862820578c9fd4e944d4d9ad1f"},
    {file = "regex-2022.6.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adafe6f2c6d86dbf3313866b61180530ca4dcd0c264932dc8fa1ffb10871d58"},
    {file = "regex-2022.6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67ae3601edf86e15ebe40885e5bfdd6002d34879070be15cf18fc0d80ea24fed"},
    {file = "regex-2022.6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:48dddddce0ea7e7c3e92c1e0c5a28c13ca4dc9cf7e996c706d00479652bff76c"},
    {file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:68e5c641645351eb9eb12c465876e76b53717f99e9b92aea7a2dd645a87aa7aa"},
    {file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8fd5f8ae42f789538bb634bdfd69b9aa357e76fdfd7ad720f32f8994c0d84f1e"},
    {file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:71988a76fcb68cc091e901fddbcac0f9ad9a475da222c47d3cf8db0876cb5344"},
    {file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:4b8838f70be3ce9e706df9d72f88a0aa7d4c1fea61488e06fdf292ccb70ad2be"},
    {file = "regex-2022.6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:663dca677bd3d2e2b5b7d0329e9f24247e6f38f3b740dd9a778a8ef41a76af41"},
    {file = "regex-2022.6.2-cp37-cp37m-win32.whl", hash = "sha256:24963f0b13cc63db336d8da2a533986419890d128c551baacd934c249d51a779"},
    {file = "regex-2022.6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ceff75127f828dfe7ceb17b94113ec2df4df274c4cd5533bb299cb099a18a8ca"},
    {file = "regex-2022.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a6f2698cfa8340dfe4c0597782776b393ba2274fe4c079900c7c74f68752705"},
    {file = "regex-2022.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8a08ace913c4101f0dc0be605c108a3761842efd5f41a3005565ee5d169fb2b"},
    {file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26dbe90b724efef7820c3cf4a0e5be7f130149f3d2762782e4e8ac2aea284a0b"},
    {file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5f759a1726b995dc896e86f17f9c0582b54eb4ead00ed5ef0b5b22260eaf2d0"},
    {file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fc26bb3415e7aa7495c000a2c13bf08ce037775db98c1a3fac9ff04478b6930"},
    {file = "regex-2022.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52684da32d9003367dc1a1c07e059b9bbaf135ad0764cd47d8ac3dba2df109bc"},
    {file = "regex-2022.6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c1264eb40a71cf2bff43d6694ab7254438ca19ef330175060262b3c8dd3931a"},
    {file = "regex-2022.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bc635ab319c9b515236bdf327530acda99be995f9d3b9f148ab1f60b2431e970"},
    {file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:27624b490b5d8880f25dac67e1e2ea93dfef5300b98c6755f585799230d6c746"},
    {file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:555f7596fd1f123f8c3a67974c01d6ef80b9769e04d660d6c1a7cc3e6cff7069"},
    {file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:933e72fbe1829cbd59da2bc51ccd73d73162f087f88521a87a8ec9cb0cf10fa8"},
    {file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cff5c87e941292c97d11dc81bd20679f56a2830f0f0e32f75b8ed6e0eb40f704"},
    {file = "regex-2022.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c757f3a27b6345de13ef3ca956aa805d7734ce68023e84d0fc74e1f09ce66f7a"},
    {file = "regex-2022.6.2-cp38-cp38-win32.whl", hash = "sha256:a58d21dd1a2d6b50ed091554ff85e448fce3fe33a4db8b55d0eba2ca957ed626"},
    {file = "regex-2022.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:495a4165172848503303ed05c9d0409428f789acc27050fe2cf0a4549188a7d5"},
    {file = "regex-2022.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1ab5cf7d09515548044e69d3a0ec77c63d7b9dfff4afc19653f638b992573126"},
    {file = "regex-2022.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c1ea28f0ee6cbe4c0367c939b015d915aa9875f6e061ba1cf0796ca9a3010570"},
    {file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de1ecf26ce85521bf73897828b6d0687cc6cf271fb6ff32ac63d26b21f5e764"},
    {file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7c7044aabdad2329974be2246babcc21d3ede852b3971a90fd8c2056c20360"},
    {file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53d69d77e9cfe468b000314dd656be85bb9e96de088a64f75fe128dfe1bf30dd"},
    {file = "regex-2022.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8d61883a38b1289fba9944a19a361875b5c0170b83cdcc95ea180247c1b7d3"},
    {file = "regex-2022.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5429202bef174a3760690d912e3a80060b323199a61cef6c6c29b30ce09fd17"},
    {file = "regex-2022.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e85b10280cf1e334a7c95629f6cbbfe30b815a4ea5f1e28d31f79eb92c2c3d93"},
    {file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c400dfed4137f32127ea4063447006d7153c974c680bf0fb1b724cce9f8567fc"},
    {file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f648037c503985aed39f85088acab6f1eb6a0482d7c6c665a5712c9ad9eaefc"},
    {file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e7b2ff451f6c305b516281ec45425dd423223c8063218c5310d6f72a0a7a517c"},
    {file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:be456b4313a86be41706319c397c09d9fdd2e5cdfde208292a277b867e99e3d1"},
    {file = "regex-2022.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c3db393b21b53d7e1d3f881b64c29d886cbfdd3df007e31de68b329edbab7d02"},
    {file = "regex-2022.6.2-cp39-cp39-win32.whl", hash = "sha256:d70596f20a03cb5f935d6e4aad9170a490d88fc4633679bf00c652e9def4619e"},
    {file = "regex-2022.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:3b9b6289e03dbe6a6096880d8ac166cb23c38b4896ad235edee789d4e8697152"},
    {file = "regex-2022.6.2.tar.gz", hash = "sha256:f7b43acb2c46fb2cd506965b2d9cf4c5e64c9c612bac26c1187933c7296bf08c"},
    {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
    {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
    {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
    {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
    {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
    {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
    {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
    {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
    {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
    {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
    {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
    {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
    {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
    {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
    {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
    {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
|
||||
{file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
|
||||
{file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
|
||||
{file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
|
||||
{file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
|
||||
]
|
||||
|
||||
[[package]]
@ -2857,50 +2901,72 @@ files = [

[[package]]
name = "s3transfer"
version = "0.3.3"
version = "0.10.3"
description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
{file = "s3transfer-0.3.3-py2.py3-none-any.whl", hash = "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13"},
{file = "s3transfer-0.3.3.tar.gz", hash = "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db"},
{file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"},
{file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"},
]

[package.dependencies]
botocore = ">=1.12.36,<2.0a.0"
botocore = ">=1.33.2,<2.0a.0"

[package.extras]
crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]

[[package]]
name = "sentry-sdk"
version = "1.5.11"
version = "2.16.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = "*"
python-versions = ">=3.6"
files = [
{file = "sentry-sdk-1.5.11.tar.gz", hash = "sha256:6c01d9d0b65935fd275adc120194737d1df317dce811e642cbf0394d0d37a007"},
{file = "sentry_sdk-1.5.11-py2.py3-none-any.whl", hash = "sha256:c17179183cac614e900cbd048dab03f49a48e2820182ec686c25e7ce46f8548f"},
{file = "sentry_sdk-2.16.0-py2.py3-none-any.whl", hash = "sha256:49139c31ebcd398f4f6396b18910610a0c1602f6e67083240c33019d1f6aa30c"},
{file = "sentry_sdk-2.16.0.tar.gz", hash = "sha256:90f733b32e15dfc1999e6b7aca67a38688a567329de4d6e184154a73f96c6892"},
]

[package.dependencies]
certifi = "*"
urllib3 = ">=1.10.0"
urllib3 = ">=1.26.11"

[package.extras]
aiohttp = ["aiohttp (>=3.5)"]
anthropic = ["anthropic (>=0.16)"]
arq = ["arq (>=0.23)"]
asyncpg = ["asyncpg (>=0.23)"]
beam = ["apache-beam (>=2.12)"]
bottle = ["bottle (>=0.12.13)"]
celery = ["celery (>=3)"]
celery-redbeat = ["celery-redbeat (>=2)"]
chalice = ["chalice (>=1.16.0)"]
clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
http2 = ["httpcore[http2] (==1.*)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
langchain = ["langchain (>=0.0.210)"]
litestar = ["litestar (>=2.0.0)"]
loguru = ["loguru (>=0.5)"]
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
opentelemetry-experimental = ["opentelemetry-distro"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pymongo = ["pymongo (>=3.1)"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
tornado = ["tornado (>=5)"]
starlette = ["starlette (>=0.19.1)"]
starlite = ["starlite (>=1.48)"]
tornado = ["tornado (>=6)"]

[[package]]
name = "setuptools"
@ -3130,17 +3196,6 @@ idna = "*"
requests = ">=2.1.0"
requests-file = ">=1.4"

[[package]]
name = "toml"
version = "0.10.1"
description = "Python Library for Tom's Obvious, Minimal Language"
optional = false
python-versions = "*"
files = [
{file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"},
{file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"},
]

[[package]]
name = "tomli"
version = "2.0.1"
@ -3272,41 +3327,39 @@ files = [

[[package]]
name = "urllib3"
version = "1.25.10"
version = "1.26.20"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"},
{file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"},
{file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
{file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
]

[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[[package]]
name = "virtualenv"
version = "20.8.1"
version = "20.21.1"
description = "Virtual Python Environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.8.1-py2.py3-none-any.whl", hash = "sha256:10062e34c204b5e4ec5f62e6ef2473f8ba76513a9a617e873f1f8fb4a519d300"},
{file = "virtualenv-20.8.1.tar.gz", hash = "sha256:bcc17f0b3a29670dd777d6f0755a4c04f28815395bca279cdcb213b97199a6b8"},
{file = "virtualenv-20.21.1-py3-none-any.whl", hash = "sha256:09ddbe1af0c8ed2bb4d6ed226b9e6415718ad18aef9fa0ba023d96b7a8356049"},
{file = "virtualenv-20.21.1.tar.gz", hash = "sha256:4c104ccde994f8b108163cf9ba58f3d11511d9403de87fb9b4f52bf33dbc8668"},
]

[package.dependencies]
"backports.entry-points-selectable" = ">=1.0.4"
distlib = ">=0.3.1,<1"
filelock = ">=3.0.0,<4"
platformdirs = ">=2,<3"
six = ">=1.9.0,<2"
distlib = ">=0.3.6,<1"
filelock = ">=3.4.1,<4"
platformdirs = ">=2.4,<4"

[package.extras]
docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"]
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"]
docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]

[[package]]
name = "watchtower"
@ -3605,21 +3658,6 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"

[[package]]
name = "zipp"
version = "3.2.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.6"
files = [
{file = "zipp-3.2.0-py3-none-any.whl", hash = "sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6"},
{file = "zipp-3.2.0.tar.gz", hash = "sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f"},
]

[package.extras]
docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["func-timeout", "jaraco.itertools", "jaraco.test (>=3.2.0)", "pytest (>=3.5,!=3.7.3)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8", "pytest-mypy"]

[[package]]
name = "zope.event"
version = "4.5.0"
@ -3698,4 +3736,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "01afc410d21eeac0a0ac7e8ef6eeb0a991cf4bc091c3351049263462e205ff63"
content-hash = "314f199bd50ccbf636ce1c6c753f8c79a1f5a16aa7c1a330a2ec514a13dbad2d"

@ -4,27 +4,30 @@ package simplelogin_events;

message UserPlanChanged {
  uint32 plan_end_time = 1;
  bool lifetime = 2;
}

message UserDeleted {
}

message AliasCreated {
  uint32 alias_id = 1;
  string alias_email = 2;
  string alias_note = 3;
  uint32 id = 1;
  string email = 2;
  string note = 3;
  bool enabled = 4;
  uint32 created_at = 5;
}

message AliasStatusChanged {
  uint32 alias_id = 1;
  string alias_email = 2;
  uint32 id = 1;
  string email = 2;
  bool enabled = 3;
  uint32 created_at = 4;
}

message AliasDeleted {
  uint32 alias_id = 1;
  string alias_email = 2;
  uint32 id = 1;
  string email = 2;
}

message AliasCreatedList {
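Note on the event schema change above: the renamed fields keep their field numbers and scalar types (`uint32 id = 1` replaces `uint32 alias_id = 1`, `string email = 2` replaces `string alias_email = 2`), so the change is wire-compatible; only the generated accessor names differ, and the added `enabled`/`created_at` fields are ignored by older readers. A minimal sketch of producing and parsing an `AliasCreated` event under the new schema, assuming the .proto file is compiled with `protoc --python_out` into a module named `simplelogin_events_pb2` (the module name here is hypothetical, not confirmed by the diff):

# simplelogin_events_pb2 is the hypothetical output of protoc for this file
from simplelogin_events_pb2 import AliasCreated

# Build an event with the renamed fields plus the two new ones.
event = AliasCreated(
    id=42,
    email="alias@example.com",
    note="created from the dashboard",
    enabled=True,
    created_at=1700000000,
)

payload = event.SerializeToString()          # wire-format bytes
decoded = AliasCreated.FromString(payload)   # round-trip parse
assert decoded.email == "alias@example.com"

Because fields 1-3 keep their numbers and types, a consumer still compiled against the old schema can decode the same payload; it simply sees the values under the old names and skips fields 4 and 5 as unknown.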
@ -1,20 +1,101 @@
[project]
name = "SimpleLogin"
version = "0.1.0"
description = "SimpleLogin partner API"
authors = [ {name="SimpleLogin", email="dev@simplelogin.io"}]
license = "MIT"
repository = "https://github.com/simple-login/app"
keywords = ["email", "alias", "privacy", "oauth2", "openid"]
packages = [
    { include = "app/" },
    { include = "migrations/" },
]
include = ["templates/*", "templates/**/*", "local_data/*.txt"]

requires-python = "~=3.10"

dependencies = [
    "flask ~= 1.1.2",
    "flask_login ~= 0.5.0",
    "wtforms ~= 2.3.3",
    "unidecode ~= 1.1.1",
    "gunicorn ~= 20.0.4",
    "bcrypt ~= 3.2.0",
    "python-dotenv ~= 0.14.0",
    "ipython ~= 7.31.1",
    "sqlalchemy_utils ~= 0.36.8",
    "psycopg2-binary ~= 2.9.3",
    "sentry_sdk ~= 2.20.0",
    "blinker ~= 1.4",
    "arrow ~= 0.16.0",
    "Flask-WTF ~= 0.14.3",
    "boto3 ~= 1.35.37",
    "Flask-Migrate ~= 2.5.3",
    "flask_admin ~= 1.5.6",
    "flask-cors ~= 3.0.9",
    "watchtower ~= 0.8.0",
    "sqlalchemy-utils == 0.36.8",
    "jwcrypto ~= 0.8",
    "yacron~=0.11.2",
    "flask-debugtoolbar ~= 0.11.0",
    "requests_oauthlib ~= 1.3.0",
    "pyopenssl ~= 19.1.0",
    "aiosmtpd ~= 1.2",
    "dnspython==2.0.0",
    "coloredlogs ~= 14.0",
    "pycryptodome ~= 3.9.8",
    "phpserialize ~= 1.3",
    "dkimpy ~= 1.0.5",
    "pyotp ~= 2.4.0",
    "flask_profiler ~= 1.8.1",
    "facebook-sdk ~= 3.1.0",
    "google-api-python-client ~= 1.12.3",
    "google-auth-httplib2 ~= 0.0.4",
    "python-gnupg ~= 0.4.6",
    "webauthn ~= 0.4.7",
    "pyspf ~= 2.0.14",
    "Flask-Limiter == 1.4",
    "memory_profiler ~= 0.57.0",
    "gevent ~= 24.11.1",
    "email-validator ~= 1.1.3",
    "PGPy == 0.5.4",
    "coinbase-commerce ~= 1.0.1",
    "requests ~= 2.25.1",
    "newrelic ~= 8.8.0",
    "flanker ~= 0.9.11",
    "pyre2 ~= 0.3.6",
    "tldextract ~= 3.1.2",
    "flask-debugtoolbar-sqlalchemy ~= 0.2.0",
    "twilio ~= 7.3.2",
    "Deprecated ~= 1.2.13",
    "MarkupSafe~=1.1.1",
    "cryptography ~= 37.0.1",
    "SQLAlchemy ~= 1.3.24",
    "redis==4.6.0",
    "newrelic-telemetry-sdk ~= 0.5.0",
    "aiospamc == 0.10",
    "itsdangerous ~= 1.1.0",
    "werkzeug ~= 1.0.1",
    "alembic ~= 1.4.3",
]

[tool.black]
target-version = ['py310']
exclude = '''
(
/(
\.eggs # exclude a few common directories in the
| \.git # root of the project
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| buck-out
| build
| dist
| migrations # migrations/ is generated by alembic
| app/events/generated
/(
\.eggs # exclude a few common directories in the
| \.git # root of the project
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| buck-out
| build
| dist
| migrations # migrations/ is generated by alembic
| app/events/generated
)/
)
'''
@ -27,7 +108,6 @@ exclude = [".venv", "migrations", "app/events/generated"]
indent = 2
profile = "jinja"
blank_line_after_tag = "if,for,include,load,extends,block,endcall"

# H006: Images should have a height attribute
# H013: Images should have an alt attribute
# H016: Missing title tag in html. | False positive on template
@ -43,92 +123,26 @@ blank_line_after_tag = "if,for,include,load,extends,block,endcall"
# T001: Variables should be wrapped in a single whitespace. | Messes up with comments
ignore = "H006,H013,H016,H017,H019,H021,H025,H030,H031,T003,J004,J018,T001"

[tool.poetry]
name = "SimpleLogin"
version = "0.1.0"
description = "open-source email alias solution"
authors = ["SimpleLogin <dev@simplelogin.io>"]
license = "MIT"
repository = "https://github.com/simple-login/app"
keywords = ["email", "alias", "privacy", "oauth2", "openid"]
packages = [
    { include = "app/" },
    { include = "migrations/" },
[tool.uv]
dev-dependencies = [
    "pytest ~= 7.0.0",
    "pytest-cov ~= 3.0.0",
    "pre-commit ~= 2.17.0",
    "black ~= 22.1.0",
    "djlint==1.34.1",
    "pylint ~= 2.14.4",
    "ruff ~= 0.1.5",
]
include = ["templates/*", "templates/**/*", "local_data/*.txt"]

[tool.poetry.dependencies]
python = "^3.10"
flask = "^1.1.2"
flask_login = "^0.5.0"
wtforms = "^2.3.3"
unidecode = "^1.1.1"
gunicorn = "^20.0.4"
bcrypt = "^3.2.0"
python-dotenv = "^0.14.0"
ipython = "^7.31.1"
sqlalchemy_utils = "^0.36.8"
psycopg2-binary = "^2.9.3"
sentry_sdk = "^1.5.11"
blinker = "^1.4"
arrow = "^0.16.0"
Flask-WTF = "^0.14.3"
boto3 = "^1.15.9"
Flask-Migrate = "^2.5.3"
flask_admin = "^1.5.6"
flask-cors = "^3.0.9"
watchtower = "^0.8.0"
sqlalchemy-utils = "^0.36.8"
jwcrypto = "^0.8"
yacron = "^0.11.1"
flask-debugtoolbar = "^0.11.0"
requests_oauthlib = "^1.3.0"
pyopenssl = "^19.1.0"
aiosmtpd = "^1.2"
dnspython = "^2.0.0"
coloredlogs = "^14.0"
pycryptodome = "^3.9.8"
phpserialize = "^1.3"
dkimpy = "^1.0.5"
pyotp = "^2.4.0"
flask_profiler = "^1.8.1"
facebook-sdk = "^3.1.0"
google-api-python-client = "^1.12.3"
google-auth-httplib2 = "^0.0.4"
python-gnupg = "^0.4.6"
webauthn = "^0.4.7"
pyspf = "^2.0.14"
Flask-Limiter = "^1.4"
memory_profiler = "^0.57.0"
gevent = "22.10.2"
email_validator = "^1.1.1"
PGPy = "0.5.4"
coinbase-commerce = "^1.0.1"
requests = "^2.25.1"
newrelic = "8.8.0"
flanker = "^0.9.11"
pyre2 = "^0.3.6"
tldextract = "^3.1.2"
flask-debugtoolbar-sqlalchemy = "^0.2.0"
twilio = "^7.3.2"
Deprecated = "^1.2.13"
cryptography = "37.0.1"
SQLAlchemy = "1.3.24"
redis = "^4.5.3"
newrelic-telemetry-sdk = "^0.5.0"
aiospamc = "0.10"

[tool.poetry.dev-dependencies]
pytest = "^7.0.0"
pytest-cov = "^3.0.0"
pre-commit = "^2.17.0"
black = "^22.1.0"
djlint = "^1.3.0"
pylint = "^2.14.4"

[tool.poetry.group.dev.dependencies]
ruff = "^0.1.5"

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.build.targets.sdist]
include = ["app", "local_data", "migrations", "templates"]

[tool.hatch.build.targets.wheel]
packages = ["app", "local_data", "migrations", "templates"]

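The pyproject.toml diff above replaces Poetry's caret ranges (`^`) with PEP 440 compatible-release specifiers (`~=`). The two are not equivalent: for a three-component pin, `~=` only lets the patch component float, while `^` allowed anything below the next major version. A minimal sketch with the third-party `packaging` library (assumed installed via `pip install packaging`), using the `flask ~= 1.1.2` pin as the example:

from packaging.specifiers import SpecifierSet

# "~=1.1.2" is shorthand for ">=1.1.2, ==1.1.*": patch releases only.
compatible = SpecifierSet("~=1.1.2")
assert compatible.contains("1.1.4")
assert not compatible.contains("1.2.0")

# Poetry's "^1.1.2" meant ">=1.1.2,<2.0.0"; there is no single PEP 440
# operator for it, so an explicit range is the closest equivalent.
caret_equivalent = SpecifierSet(">=1.1.2,<2.0.0")
assert caret_equivalent.contains("1.2.0")

For pins like these, `~=` is therefore the stricter of the two, so the migrated dependency list generally allows a narrower range of versions than the old Poetry one.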
469
app/requirements-dev.lock
Normal file
@ -0,0 +1,469 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
#   pre: false
#   features: []
#   all-features: false
#   with-sources: false
#   generate-hashes: false
#   universal: false

-e file:.
aiohappyeyeballs==2.4.4
    # via aiohttp
aiohttp==3.11.11
    # via yacron
aiosignal==1.3.2
    # via aiohttp
aiosmtpd==1.4.6
    # via simplelogin
aiosmtplib==3.0.2
    # via yacron
aiospamc==0.10.0
    # via simplelogin
alembic==1.14.0
    # via flask-migrate
appnope==0.1.4
    # via ipython
arrow==0.16.0
    # via simplelogin
astroid==2.11.7
    # via pylint
async-timeout==5.0.1
    # via aiohttp
    # via redis
atpublic==5.0
    # via aiosmtpd
attrs==24.3.0
    # via aiohttp
    # via aiosmtpd
    # via flanker
    # via pytest
backcall==0.2.0
    # via ipython
bcrypt==3.2.2
    # via simplelogin
black==22.1.0
blinker==1.9.0
    # via flask-debugtoolbar
    # via simplelogin
boto3==1.35.99
    # via simplelogin
    # via watchtower
botocore==1.35.99
    # via boto3
    # via s3transfer
cachetools==5.5.0
    # via google-auth
cbor2==5.6.5
    # via webauthn
certifi==2024.12.14
    # via aiospamc
    # via requests
    # via sentry-sdk
cffi==1.17.1
    # via bcrypt
    # via cryptography
cfgv==3.4.0
    # via pre-commit
chardet==4.0.0
    # via flanker
    # via requests
click==8.1.8
    # via black
    # via djlint
    # via flask
    # via typer
coinbase-commerce==1.0.1
    # via simplelogin
colorama==0.4.6
    # via djlint
coloredlogs==14.3
    # via simplelogin
coverage==7.6.10
    # via pytest-cov
crontab==0.22.8
    # via yacron
cryptography==37.0.4
    # via flanker
    # via jwcrypto
    # via pgpy
    # via pyopenssl
    # via simplelogin
    # via webauthn
decorator==5.1.1
    # via ipython
deprecated==1.2.15
    # via jwcrypto
    # via limits
    # via simplelogin
dill==0.3.9
    # via pylint
distlib==0.3.9
    # via virtualenv
djlint==1.3.0
dkimpy==1.0.6
    # via simplelogin
dnspython==2.6.1
    # via dkimpy
    # via email-validator
    # via simplelogin
email-validator==1.1.3
    # via simplelogin
facebook-sdk==3.1.0
    # via simplelogin
filelock==3.16.1
    # via tldextract
    # via virtualenv
flanker==0.9.11
    # via simplelogin
flask==1.1.2
    # via flask-admin
    # via flask-cors
    # via flask-debugtoolbar
    # via flask-httpauth
    # via flask-limiter
    # via flask-login
    # via flask-migrate
    # via flask-profiler
    # via flask-sqlalchemy
    # via flask-wtf
    # via simplelogin
flask-admin==1.5.8
    # via simplelogin
flask-cors==3.0.10
    # via simplelogin
flask-debugtoolbar==0.11.0
    # via flask-debugtoolbar-sqlalchemy
    # via simplelogin
flask-debugtoolbar-sqlalchemy==0.2.0
    # via simplelogin
flask-httpauth==4.8.0
    # via flask-profiler
flask-limiter==1.4
    # via simplelogin
flask-login==0.5.0
    # via simplelogin
flask-migrate==2.5.3
    # via simplelogin
flask-profiler==1.8.1
    # via simplelogin
flask-sqlalchemy==2.5.1
    # via flask-migrate
flask-wtf==0.14.3
    # via simplelogin
frozenlist==1.5.0
    # via aiohttp
    # via aiosignal
future==1.0.0
    # via webauthn
gevent==24.11.1
    # via simplelogin
google-api-core==2.24.0
    # via google-api-python-client
google-api-python-client==1.12.11
    # via simplelogin
google-auth==2.37.0
    # via google-api-core
    # via google-api-python-client
    # via google-auth-httplib2
google-auth-httplib2==0.0.4
    # via google-api-python-client
    # via simplelogin
googleapis-common-protos==1.66.0
    # via google-api-core
greenlet==3.1.1
    # via gevent
gunicorn==20.0.4
    # via simplelogin
html-tag-names==0.1.2
    # via djlint
html-void-elements==0.1.0
    # via djlint
httplib2==0.22.0
    # via google-api-python-client
    # via google-auth-httplib2
humanfriendly==10.0
    # via coloredlogs
identify==2.6.5
    # via pre-commit
idna==2.10
    # via email-validator
    # via flanker
    # via requests
    # via tldextract
    # via yarl
importlib-metadata==4.13.0
    # via djlint
iniconfig==2.0.0
    # via pytest
ipython==7.31.1
    # via simplelogin
isort==5.13.2
    # via pylint
itsdangerous==1.1.0
    # via flask
    # via flask-debugtoolbar
    # via flask-wtf
    # via simplelogin
jedi==0.19.2
    # via ipython
jinja2==2.11.3
    # via flask
    # via yacron
jmespath==1.0.1
    # via boto3
    # via botocore
jwcrypto==0.9.1
    # via simplelogin
lazy-object-proxy==1.10.0
    # via astroid
limits==4.0.0
    # via flask-limiter
loguru==0.7.3
    # via aiospamc
mako==1.3.8
    # via alembic
markupsafe==1.1.1
    # via jinja2
    # via mako
    # via simplelogin
    # via wtforms
matplotlib-inline==0.1.7
    # via ipython
mccabe==0.7.0
    # via pylint
memory-profiler==0.57.0
    # via simplelogin
multidict==6.1.0
    # via aiohttp
    # via yarl
mypy-extensions==1.0.0
    # via black
newrelic==8.8.1
    # via simplelogin
newrelic-telemetry-sdk==0.5.1
    # via simplelogin
nodeenv==1.9.1
    # via pre-commit
oauthlib==3.2.2
    # via requests-oauthlib
packaging==24.2
    # via limits
    # via pytest
parso==0.8.4
    # via jedi
pathspec==0.9.0
    # via black
    # via djlint
pexpect==4.9.0
    # via ipython
pgpy==0.5.4
    # via simplelogin
phpserialize==1.3
    # via simplelogin
pickleshare==0.7.5
    # via ipython
platformdirs==4.3.6
    # via black
    # via pylint
    # via virtualenv
pluggy==1.5.0
    # via pytest
ply==3.11
    # via flanker
pre-commit==2.17.0
prompt-toolkit==3.0.48
    # via ipython
propcache==0.2.1
    # via aiohttp
    # via yarl
proto-plus==1.25.0
    # via google-api-core
protobuf==5.29.3
    # via google-api-core
    # via googleapis-common-protos
    # via proto-plus
psutil==6.1.1
    # via memory-profiler
psycopg2-binary==2.9.10
    # via simplelogin
ptyprocess==0.7.0
    # via pexpect
py==1.11.0
    # via pytest
pyasn1==0.6.1
    # via pgpy
    # via pyasn1-modules
    # via rsa
pyasn1-modules==0.4.1
    # via google-auth
pycparser==2.22
    # via cffi
pycryptodome==3.9.9
    # via simplelogin
pygments==2.19.1
    # via flask-debugtoolbar-sqlalchemy
    # via ipython
pyjwt==2.10.1
    # via twilio
pylint==2.14.5
pyopenssl==19.1.0
    # via simplelogin
    # via webauthn
pyotp==2.4.1
    # via simplelogin
pyparsing==3.2.1
    # via httplib2
pyre2==0.3.6
    # via simplelogin
pyspf==2.0.14
    # via simplelogin
pytest==7.0.1
    # via pytest-cov
pytest-cov==3.0.0
python-dateutil==2.9.0.post0
    # via arrow
    # via botocore
    # via strictyaml
python-dotenv==0.14.0
    # via simplelogin
python-gnupg==0.4.9
    # via simplelogin
pytz==2024.2
    # via twilio
    # via yacron
pyyaml==6.0.2
    # via djlint
    # via pre-commit
redis==4.5.5
    # via simplelogin
regex==2022.10.31
    # via djlint
    # via flanker
requests==2.25.1
    # via coinbase-commerce
    # via facebook-sdk
    # via google-api-core
    # via requests-file
    # via requests-oauthlib
    # via simplelogin
    # via tldextract
    # via twilio
requests-file==2.1.0
    # via tldextract
requests-oauthlib==1.3.1
    # via simplelogin
rsa==4.9
    # via google-auth
ruamel-yaml==0.17.4
    # via yacron
ruff==0.1.15
s3transfer==0.10.4
    # via boto3
sentry-sdk==2.20.0
    # via simplelogin
    # via yacron
setuptools==75.8.0
    # via astroid
    # via gunicorn
    # via ipython
    # via zope-event
    # via zope-interface
simplejson==3.19.3
    # via flask-profiler
six==1.17.0
    # via coinbase-commerce
    # via flanker
    # via flask-cors
    # via flask-limiter
    # via google-api-python-client
    # via google-auth-httplib2
    # via jwcrypto
    # via pgpy
    # via pyopenssl
    # via python-dateutil
    # via sqlalchemy-utils
    # via webauthn
sqlalchemy==1.3.24
    # via alembic
    # via flask-debugtoolbar-sqlalchemy
    # via flask-sqlalchemy
    # via simplelogin
    # via sqlalchemy-utils
sqlalchemy-utils==0.36.8
    # via simplelogin
sqlparse==0.5.3
    # via flask-debugtoolbar-sqlalchemy
strictyaml==1.7.3
    # via yacron
tld==0.13
    # via flanker
tldextract==3.1.2
    # via simplelogin
toml==0.10.2
    # via pre-commit
tomli==2.2.1
    # via black
    # via coverage
    # via djlint
    # via pylint
    # via pytest
tomlkit==0.13.2
    # via pylint
tqdm==4.67.1
    # via djlint
traitlets==5.14.3
    # via ipython
    # via matplotlib-inline
twilio==7.3.2
    # via simplelogin
typer==0.9.4
    # via aiospamc
typing-extensions==4.12.2
    # via aiospamc
    # via alembic
    # via limits
    # via multidict
    # via typer
unidecode==1.1.2
    # via simplelogin
uritemplate==3.0.1
    # via google-api-python-client
urllib3==1.26.20
    # via botocore
    # via newrelic-telemetry-sdk
    # via requests
    # via sentry-sdk
virtualenv==20.29.0
    # via pre-commit
watchtower==0.8.0
    # via simplelogin
wcwidth==0.2.13
    # via prompt-toolkit
webauthn==0.4.7
    # via simplelogin
webob==1.8.9
    # via flanker
werkzeug==1.0.1
    # via flask
    # via flask-debugtoolbar
    # via simplelogin
wrapt==1.17.2
    # via astroid
    # via deprecated
wtforms==2.3.3
    # via flask-admin
    # via flask-wtf
    # via simplelogin
yacron==0.19.0
    # via simplelogin
yarl==1.18.3
    # via aiohttp
zipp==3.21.0
    # via importlib-metadata
zope-event==5.0
    # via gevent
zope-interface==7.2
    # via gevent
392
app/requirements.lock
Normal file
@ -0,0 +1,392 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
#   pre: false
#   features: []
#   all-features: false
#   with-sources: false
#   generate-hashes: false
#   universal: false

-e file:.
aiohttp==3.8.4
    # via google-auth
    # via yacron
aiosignal==1.2.0
    # via aiohttp
aiosmtpd==1.4.2
    # via simplelogin
aiosmtplib==1.1.4
    # via yacron
aiospamc==0.10.0
    # via simplelogin
alembic==1.4.3
    # via flask-migrate
appnope==0.1.0
    # via ipython
arrow==0.16.0
    # via simplelogin
async-timeout==4.0.2
    # via aiohttp
    # via redis
atpublic==2.0
    # via aiosmtpd
attrs==20.2.0
    # via aiohttp
    # via aiosmtpd
    # via flanker
backcall==0.2.0
    # via ipython
bcrypt==3.2.0
    # via simplelogin
blinker==1.4
    # via flask-debugtoolbar
    # via simplelogin
boto3==1.35.99
    # via simplelogin
    # via watchtower
botocore==1.35.99
    # via boto3
    # via s3transfer
cachetools==4.1.1
    # via google-auth
cbor2==5.2.0
    # via webauthn
certifi==2019.11.28
    # via aiospamc
    # via requests
    # via sentry-sdk
cffi==1.14.4
    # via bcrypt
    # via cryptography
chardet==3.0.4
    # via flanker
    # via requests
charset-normalizer==3.4.1
    # via aiohttp
click==8.0.3
    # via flask
    # via typer
coinbase-commerce==1.0.1
    # via simplelogin
coloredlogs==14.0
    # via simplelogin
crontab==0.22.8
    # via yacron
cryptography==37.0.1
    # via flanker
    # via jwcrypto
    # via pgpy
    # via pyopenssl
    # via simplelogin
    # via webauthn
decorator==4.4.2
    # via ipython
deprecated==1.2.13
    # via simplelogin
dkimpy==1.0.5
    # via simplelogin
dnspython==2.6.1
    # via dkimpy
    # via email-validator
    # via simplelogin
email-validator==1.1.3
    # via simplelogin
facebook-sdk==3.1.0
    # via simplelogin
filelock==3.15.4
    # via tldextract
flanker==0.9.11
    # via simplelogin
flask==1.1.2
    # via flask-admin
    # via flask-cors
    # via flask-debugtoolbar
    # via flask-httpauth
    # via flask-limiter
    # via flask-login
    # via flask-migrate
    # via flask-profiler
    # via flask-sqlalchemy
    # via flask-wtf
    # via simplelogin
flask-admin==1.5.7
    # via simplelogin
flask-cors==3.0.9
    # via simplelogin
flask-debugtoolbar==0.11.0
    # via flask-debugtoolbar-sqlalchemy
    # via simplelogin
flask-debugtoolbar-sqlalchemy==0.2.0
    # via simplelogin
flask-httpauth==4.1.0
    # via flask-profiler
flask-limiter==1.4
    # via simplelogin
flask-login==0.5.0
    # via simplelogin
flask-migrate==2.5.3
    # via simplelogin
flask-profiler==1.8.1
    # via simplelogin
flask-sqlalchemy==2.5.1
    # via flask-migrate
flask-wtf==0.14.3
    # via simplelogin
frozenlist==1.3.3
    # via aiohttp
    # via aiosignal
future==0.18.3
    # via webauthn
gevent==24.11.1
    # via simplelogin
google-api-core==1.22.2
    # via google-api-python-client
google-api-python-client==1.12.3
    # via simplelogin
google-auth==1.22.0
    # via google-api-core
    # via google-api-python-client
    # via google-auth-httplib2
google-auth-httplib2==0.0.4
    # via google-api-python-client
    # via simplelogin
googleapis-common-protos==1.52.0
    # via google-api-core
greenlet==3.1.1
    # via gevent
gunicorn==20.0.4
    # via simplelogin
httplib2==0.22.0
    # via google-api-python-client
    # via google-auth-httplib2
humanfriendly==8.2
    # via coloredlogs
idna==2.10
    # via email-validator
    # via flanker
    # via requests
    # via tldextract
    # via yarl
ipython==7.31.1
    # via simplelogin
ipython-genutils==0.2.0
    # via traitlets
itsdangerous==1.1.0
    # via flask
    # via flask-debugtoolbar
    # via flask-wtf
    # via simplelogin
jedi==0.17.2
    # via ipython
jinja2==2.11.3
    # via flask
    # via yacron
jmespath==0.10.0
    # via boto3
    # via botocore
jwcrypto==0.8
    # via simplelogin
limits==1.5.1
    # via flask-limiter
loguru==0.7.2
    # via aiospamc
mako==1.2.4
    # via alembic
markupsafe==1.1.1
    # via jinja2
    # via mako
    # via simplelogin
    # via wtforms
matplotlib-inline==0.1.3
    # via ipython
memory-profiler==0.57.0
    # via simplelogin
multidict==4.7.6
    # via aiohttp
    # via yarl
newrelic==8.8.0
    # via simplelogin
newrelic-telemetry-sdk==0.5.0
    # via simplelogin
oauthlib==3.1.0
    # via requests-oauthlib
parso==0.7.1
    # via jedi
pexpect==4.8.0
    # via ipython
pgpy==0.5.4
    # via simplelogin
phpserialize==1.3
    # via simplelogin
pickleshare==0.7.5
    # via ipython
ply==3.11
    # via flanker
prompt-toolkit==3.0.7
    # via ipython
protobuf==5.27.1
    # via google-api-core
    # via googleapis-common-protos
psutil==5.7.2
    # via memory-profiler
psycopg2-binary==2.9.3
    # via simplelogin
ptyprocess==0.6.0
    # via pexpect
pyasn1==0.4.8
    # via pgpy
    # via pyasn1-modules
    # via rsa
pyasn1-modules==0.2.8
    # via google-auth
pycparser==2.20
    # via cffi
pycryptodome==3.9.8
    # via simplelogin
pygments==2.7.4
    # via flask-debugtoolbar-sqlalchemy
    # via ipython
pyjwt==2.4.0
    # via twilio
pyopenssl==19.1.0
    # via simplelogin
    # via webauthn
pyotp==2.4.0
    # via simplelogin
pyparsing==2.4.7
    # via httplib2
pyre2==0.3.6
    # via simplelogin
pyspf==2.0.14
    # via simplelogin
python-dateutil==2.8.1
    # via alembic
    # via arrow
    # via botocore
    # via strictyaml
python-dotenv==0.14.0
    # via simplelogin
python-editor==1.0.4
    # via alembic
python-gnupg==0.4.6
    # via simplelogin
pytz==2020.1
    # via google-api-core
    # via twilio
    # via yacron
redis==4.5.5
    # via simplelogin
regex==2023.12.25
    # via flanker
requests==2.25.1
    # via coinbase-commerce
    # via facebook-sdk
    # via google-api-core
    # via requests-file
    # via requests-oauthlib
    # via simplelogin
    # via tldextract
    # via twilio
requests-file==1.5.1
    # via tldextract
requests-oauthlib==1.3.0
    # via simplelogin
rsa==4.6
    # via google-auth
ruamel-yaml==0.17.4
    # via strictyaml
    # via yacron
s3transfer==0.10.4
    # via boto3
sentry-sdk==2.20.0
    # via simplelogin
    # via yacron
setuptools==67.6.0
    # via google-api-core
    # via google-auth
    # via gunicorn
    # via ipython
    # via zope-event
    # via zope-interface
simplejson==3.17.2
    # via flask-profiler
six==1.15.0
    # via bcrypt
    # via coinbase-commerce
    # via flanker
    # via flask-cors
    # via flask-limiter
    # via google-api-core
    # via google-api-python-client
    # via google-auth
    # via google-auth-httplib2
    # via limits
    # via pgpy
    # via pyopenssl
    # via python-dateutil
    # via requests-file
    # via sqlalchemy-utils
    # via webauthn
sqlalchemy==1.3.24
    # via alembic
    # via flask-debugtoolbar-sqlalchemy
    # via flask-sqlalchemy
    # via simplelogin
    # via sqlalchemy-utils
sqlalchemy-utils==0.36.8
    # via simplelogin
sqlparse==0.4.4
    # via flask-debugtoolbar-sqlalchemy
strictyaml==1.1.0
    # via yacron
tld==0.12.6
    # via flanker
tldextract==3.1.2
    # via simplelogin
traitlets==5.0.4
    # via ipython
    # via matplotlib-inline
twilio==7.3.2
    # via simplelogin
typer==0.9.0
    # via aiospamc
typing-extensions==4.8.0
    # via aiospamc
    # via typer
unidecode==1.1.1
    # via simplelogin
uritemplate==3.0.1
    # via google-api-python-client
urllib3==1.26.20
    # via botocore
    # via newrelic-telemetry-sdk
    # via requests
    # via sentry-sdk
watchtower==0.8.0
    # via simplelogin
wcwidth==0.2.5
    # via prompt-toolkit
webauthn==0.4.7
    # via simplelogin
webob==1.8.7
    # via flanker
werkzeug==1.0.1
    # via flask
    # via flask-debugtoolbar
    # via simplelogin
wrapt==1.15.0
    # via deprecated
wtforms==2.3.3
    # via flask-admin
    # via flask-wtf
    # via simplelogin
yacron==0.19.0
    # via simplelogin
yarl==1.9.2
    # via aiohttp
zope-event==5.0
    # via gevent
zope-interface==7.2
    # via gevent
Some files were not shown because too many files have changed in this diff.