Compare commits

2 commits:

- 026865e5bf
- add94ef2a2
```diff
@@ -179,6 +179,7 @@ AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
 BUCKET = os.environ.get("BUCKET")
 AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
 AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)
 
 # Paddle
 try:
```
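As a quick illustration of the new setting, here is a minimal sketch of how the optional endpoint override resolves; the `http://localhost:9000` MinIO-style URL is a hypothetical value, not part of this change. When the variable is absent the setting stays `None` and boto3 keeps using the regular AWS S3 endpoints for `AWS_REGION`.

```python
import os

# Hypothetical value for a self-hosted, S3-compatible store such as MinIO.
os.environ["AWS_ENDPOINT_URL"] = "http://localhost:9000"

# Same lookup as in the config change above: unset means None, i.e. plain AWS.
AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)
print(AWS_ENDPOINT_URL)  # -> http://localhost:9000 (or None when not configured)
```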
```diff
@@ -13,17 +13,29 @@ from app.config import (
     LOCAL_FILE_UPLOAD,
     UPLOAD_DIR,
     URL,
+    AWS_ENDPOINT_URL,
 )
+from app.log import LOG
 
-if not LOCAL_FILE_UPLOAD:
-    _session = boto3.Session(
-        aws_access_key_id=AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
-        region_name=AWS_REGION,
-    )
+_s3_client = None
+
+
+def _get_s3client():
+    global _s3_client
+    if _s3_client is None:
+        args = {
+            "aws_access_key_id": AWS_ACCESS_KEY_ID,
+            "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
+            "region_name": AWS_REGION,
+        }
+        if AWS_ENDPOINT_URL:
+            args["endpoint_url"] = AWS_ENDPOINT_URL
+        _s3_client = boto3.client("s3", **args)
+    return _s3_client
 
 
-def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
+def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
     bs.seek(0)
 
     if LOCAL_FILE_UPLOAD:
@@ -34,7 +46,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
             f.write(bs.read())
 
     else:
-        _session.resource("s3").Bucket(BUCKET).put_object(
+        _get_s3client().put_object(
+            Bucket=BUCKET,
             Key=key,
             Body=bs,
             ContentType=content_type,
@@ -52,7 +65,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
             f.write(bs.read())
 
     else:
-        _session.resource("s3").Bucket(BUCKET).put_object(
+        _get_s3client().put_object(
+            Bucket=BUCKET,
             Key=path,
             Body=bs,
             # Support saving a remote file using Http header
@@ -67,12 +81,9 @@ def download_email(path: str) -> Optional[str]:
         file_path = os.path.join(UPLOAD_DIR, path)
         with open(file_path, "rb") as f:
             return f.read()
-    resp = (
-        _session.resource("s3")
-        .Bucket(BUCKET)
-        .get_object(
-            Key=path,
-        )
-    )
+    resp = _get_s3client().get_object(
+        Bucket=BUCKET,
+        Key=path,
+    )
     if not resp or "Body" not in resp:
         return None
@@ -88,8 +99,7 @@ def get_url(key: str, expires_in=3600) -> str:
     if LOCAL_FILE_UPLOAD:
         return URL + "/static/upload/" + key
     else:
-        s3_client = _session.client("s3")
-        return s3_client.generate_presigned_url(
+        return _get_s3client().generate_presigned_url(
             ExpiresIn=expires_in,
             ClientMethod="get_object",
             Params={"Bucket": BUCKET, "Key": key},
@@ -100,5 +110,15 @@ def delete(path: str):
     if LOCAL_FILE_UPLOAD:
         os.remove(os.path.join(UPLOAD_DIR, path))
     else:
-        o = _session.resource("s3").Bucket(BUCKET).Object(path)
-        o.delete()
+        _get_s3client().delete_object(Bucket=BUCKET, Key=path)
+
+
+def create_bucket_if_not_exists():
+    s3client = _get_s3client()
+    buckets = s3client.list_buckets()
+    for bucket in buckets["Buckets"]:
+        if bucket["Name"] == BUCKET:
+            LOG.i("Bucket already exists")
+            return
+    s3client.create_bucket(Bucket=BUCKET)
+    LOG.i(f"Bucket {BUCKET} created")
```
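To see how the refactored helper is meant to be used, here is a self-contained sketch that mirrors what `_get_s3client()` builds: a plain `boto3.client("s3")` that honors an optional `endpoint_url`, followed by an upload and a presigned download URL in the same shape as `upload_from_bytesio()` and `get_url()`. The bucket name, credentials, and endpoint below are hypothetical placeholders; this is an illustration, not the app's own module.

```python
from io import BytesIO

import boto3

# Hypothetical settings; in the app these come from app.config.
BUCKET = "simplelogin-demo"
args = {
    "aws_access_key_id": "demo-access-key",
    "aws_secret_access_key": "demo-secret-key",
    "region_name": "eu-west-3",
}
endpoint_url = "http://localhost:9000"  # omit to target AWS itself
if endpoint_url:
    args["endpoint_url"] = endpoint_url

s3 = boto3.client("s3", **args)

# Same call shape as upload_from_bytesio(): the client API takes the bucket
# explicitly, unlike the old resource-based Bucket(BUCKET).put_object() form.
bs = BytesIO(b"hello")
bs.seek(0)
s3.put_object(Bucket=BUCKET, Key="demo/hello.txt", Body=bs, ContentType="text/plain")

# And the get_url() equivalent: a time-limited presigned GET link.
url = s3.generate_presigned_url(
    ClientMethod="get_object",
    Params={"Bucket": BUCKET, "Key": "demo/hello.txt"},
    ExpiresIn=3600,
)
print(url)
```

Caching the client in a module-level variable, as the diff does, means the client is built once on first use and then shared by every upload, download, presign, and delete call.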
```diff
@@ -407,8 +407,10 @@ def jinja2_filter(app):
 
     @app.context_processor
     def inject_stage_and_region():
+        now = arrow.now()
         return dict(
-            YEAR=arrow.now().year,
+            YEAR=now.year,
+            NOW=now,
             URL=URL,
             SENTRY_DSN=SENTRY_FRONT_END_DSN,
             VERSION=SHA1,
```
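For readers unfamiliar with Flask context processors: anything returned from a function decorated with `@app.context_processor` is merged into the context of every rendered template, which is how the new `NOW` value becomes visible to the templates changed below. A standalone sketch with hypothetical wiring, not the app's own setup:

```python
import arrow
from flask import Flask, render_template_string

app = Flask(__name__)


@app.context_processor
def inject_now():
    # Evaluate arrow.now() once per render and expose it to all templates,
    # mirroring the NOW=now addition to inject_stage_and_region().
    now = arrow.now()
    return dict(NOW=now, YEAR=now.year)


with app.test_request_context():
    print(render_template_string("Year {{ YEAR }}, now {{ NOW }}"))
```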
```diff
@@ -86,7 +86,7 @@
 </head>
 <body>
   <div class="page">
-    {% if current_user.is_authenticated and current_user.should_show_upgrade_button() %}
+    {% if NOW.timestamp < 1701475201 and current_user.is_authenticated and current_user.should_show_upgrade_button() %}
 
       <div class="alert alert-success text-center mb-0" role="alert">
         Black Friday: $20 for the first year instead of $30. Available until December 1st.
```
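The magic number in the new guard is simply the end of the advertised deadline: 1701475201 is 2023-12-02 00:00:01 UTC, one second after December 1, 2023 has fully elapsed in UTC. A quick check:

```python
from datetime import datetime, timezone

# One second past midnight UTC on December 2, 2023 -- the moment the
# "available until December 1st" window is over.
cutoff = datetime(2023, 12, 2, 0, 0, 1, tzinfo=timezone.utc)
print(int(cutoff.timestamp()))  # 1701475201
```

So `NOW.timestamp < 1701475201` keeps the Black Friday banner visible through December 1, 2023 (UTC) and hides it afterwards without a separate deploy to take it down.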
```diff
@@ -57,19 +57,22 @@
 {% endblock %}
 {% block default_content %}
 
-  <div class="alert alert-info">
-    Black Friday Deal: 33% off on the yearly plan for the <b>first</b> year ($20 instead of $30).
-    <br>
-    Please use this coupon code
-    <em data-toggle="tooltip"
-        title="Click to copy"
-        class="clipboard"
-        data-clipboard-text="BF2023">BF2023</em> during the checkout.
-    <br>
-    <img src="/static/images/coupon.png" class="m-2" style="max-width: 300px">
-    <br>
-    Available until December 1, 2023.
-  </div>
+  {% if NOW.timestamp < 1701475201 %}
+
+    <div class="alert alert-info">
+      Black Friday Deal: 33% off on the yearly plan for the <b>first</b> year ($20 instead of $30).
+      <br>
+      Please use this coupon code
+      <em data-toggle="tooltip"
+          title="Click to copy"
+          class="clipboard"
+          data-clipboard-text="BF2023">BF2023</em> during the checkout.
+      <br>
+      <img src="/static/images/coupon.png" class="m-2" style="max-width: 300px">
+      <br>
+      Available until December 1, 2023.
+    </div>
+  {% endif %}
   <div class="pb-8">
     <div class="text-center mx-md-auto mb-8 mt-6">
       <h1>Upgrade to unlock premium features</h1>
```