
Revert formatting changes

This commit is contained in:
Reinout Meliesie 2025-03-11 10:25:28 +01:00
parent 64ea602f02
commit 1e0d547db5
Signed by: zedfrigg
GPG key ID: 3AFCC06481308BC6
3 changed files with 696 additions and 625 deletions

View file

@@ -1,94 +1,97 @@
""" Access the activity streams stored in Redis """
from bookwyrm import models
from bookwyrm . redis_store import RedisStore , r
from bookwyrm . tasks import app , STREAMS , IMPORT_TRIGGERED
""" access the activity streams stored in redis """
from datetime import timedelta
from django.dispatch import receiver
from django.db import transaction
from django.db.models import signals, Q
from django.utils import timezone
from bookwyrm import models
from bookwyrm.redis_store import RedisStore, r
from bookwyrm.tasks import app, STREAMS, IMPORT_TRIGGERED
class ActivityStream(RedisStore):
"""A category of activity stream (like home, local, books)"""
"""a category of activity stream (like home, local, books)"""
def stream_id(self, user_id):
"""The redis key for this user's instance of this stream"""
"""the redis key for this user's instance of this stream"""
return f"{user_id}-{self.key}"
def unread_id(self, user_id):
"""The redis key for this user's unread count for this stream"""
"""the redis key for this user's unread count for this stream"""
stream_id = self.stream_id(user_id)
return f"{stream_id}-unread"
def unread_by_status_type_id(self, user_id):
"""The redis key for this user's unread count for this stream"""
"""the redis key for this user's unread count for this stream"""
stream_id = self.stream_id(user_id)
return f"{stream_id}-unread-by-type"
def get_rank(self, obj):
"""Statuses are sorted by date published"""
"""statuses are sorted by date published"""
return obj.published_date.timestamp()
def add_status(self, status, increment_unread=False):
"""Add a status to users' feeds"""
"""add a status to users' feeds"""
audience = self.get_audience(status)
# The pipeline contains all the add-to-stream activities
# the pipeline contains all the add-to-stream activities
pipeline = self.add_object_to_stores(
status ,
self . get_stores_for_users (audience) ,
execute = False ,
status, self.get_stores_for_users(audience), execute=False
)
if increment_unread:
for user_id in audience:
# Add to the unread status count
# add to the unread status count
pipeline.incr(self.unread_id(user_id))
# Add to the unread status count for status type
# add to the unread status count for status type
pipeline.hincrby(
self . unread_by_status_type_id (user_id) ,
get_status_type (status) ,
1 ,
self.unread_by_status_type_id(user_id), get_status_type(status), 1
)
# And go!
# and go!
pipeline.execute()
def add_user_statuses(self, viewer, user):
"""Add a user's statuses to another user's feed"""
# Only add the statuses that the viewer should be able to see (ie, not DMs)
"""add a user's statuses to another user's feed"""
# only add the statuses that the viewer should be able to see (ie, not dms)
statuses = models.Status.privacy_filter(viewer).filter(user=user)
self.bulk_add_objects_to_store(statuses, self.stream_id(viewer.id))
def remove_user_statuses(self, viewer, user):
"""Remove a user's status from another user's feed"""
# Remove all so that followers only statuses are removed
"""remove a user's status from another user's feed"""
# remove all so that followers only statuses are removed
statuses = user.status_set.all()
self.bulk_remove_objects_from_store(statuses, self.stream_id(viewer.id))
def get_activity_stream(self, user):
"""Load the statuses to be displayed"""
# Clear unreads for this feed
"""load the statuses to be displayed"""
# clear unreads for this feed
r.set(self.unread_id(user.id), 0)
r.delete(self.unread_by_status_type_id(user.id))
statuses = self.get_store(self.stream_id(user.id))
return models . Status . objects . select_subclasses ()
return (
models.Status.objects.select_subclasses()
.filter(id__in=statuses)
. select_related ( "user" , "reply_parent" , "comment__book" , "review__book" , "quotation__book" )
.select_related(
"user",
"reply_parent",
"comment__book",
"review__book",
"quotation__book",
)
.prefetch_related("mention_books", "mention_users")
.order_by("-published_date")
)
def get_unread_count(self, user):
"""Get the unread status count for this user's feed"""
"""get the unread status count for this user's feed"""
return int(r.get(self.unread_id(user.id)) or 0)
def get_unread_count_by_status_type(self, user):
"""Get the unread status count for this user's feed's status types"""
"""get the unread status count for this user's feed's status types"""
status_types = r.hgetall(self.unread_by_status_type_id(user.id))
return {
str(key.decode("utf-8")): int(value) or 0
@@ -96,54 +99,59 @@ class ActivityStream (RedisStore) :
}
def populate_streams(self, user):
"""Go from zero to a timeline"""
"""go from zero to a timeline"""
self.populate_store(self.stream_id(user.id))
def _get_audience ( self , status) :
"""Given a status, what users should see it, excluding the author"""
# Direct messages don't appear in feeds, direct comments/reviews/etc do
def _get_audience(self, status): # pylint: disable=no-self-use
"""given a status, what users should see it, excluding the author"""
# direct messages don't appear in feeds, direct comments/reviews/etc do
if status.privacy == "direct" and status.status_type == "Note":
return models.User.objects.none()
# Everybody who could plausibly see this status
audience = models . User . objects
. filter ( is_active = True , local = True ) # We only create feeds for users of this instance
. exclude (
Q ( id__in = status . user . blocks . all () ) |
Q ( blocks = status . user) # Not blocked
# everybody who could plausibly see this status
audience = models.User.objects.filter(
is_active=True,
local=True, # we only create feeds for users of this instance
).exclude(
Q(id__in=status.user.blocks.all()) | Q(blocks=status.user) # not blocked
)
# Only visible to the poster and mentioned users
# only visible to the poster and mentioned users
if status.privacy == "direct":
audience = audience . filter ( Q ( id__in = status . mention_users . all () ) ) # If the user is mentioned
audience = audience.filter(
Q(id__in=status.mention_users.all()) # if the user is mentioned
)
# Don't show replies to statuses the user can't see
# don't show replies to statuses the user can't see
elif status.reply_parent and status.reply_parent.privacy == "followers":
audience = audience.filter(
Q ( id = status . reply_parent . user . id ) | # If the user is the OG author
( Q ( following = status . user ) & Q ( following = status . reply_parent . user ) ) # If the user is following both authors
Q(id=status.reply_parent.user.id) # if the user is the OG author
| (
Q(following=status.user) & Q(following=status.reply_parent.user)
) # if the user is following both authors
)
# Only visible to the poster's followers and tagged users
# only visible to the poster's followers and tagged users
elif status.privacy == "followers":
audience = audience . filter ( Q ( following = status . user ) ) # If the user is following the author
audience = audience.filter(
Q(following=status.user) # if the user is following the author
)
return audience.distinct("id")
def get_audience(self, status):
"""Given a status, what users should see it"""
"""given a status, what users should see it"""
audience = self._get_audience(status).values_list("id", flat=True)
status_author = models . User . objects
. filter ( is_active = True , local = True , id = status . user . id )
. values_list ( "id" , flat = True )
status_author = models.User.objects.filter(
is_active=True, local=True, id=status.user.id
).values_list("id", flat=True)
return list(set(audience) | set(status_author))
def get_stores_for_users(self, user_ids):
"""Convert a list of user ids into Redis store ids"""
"""convert a list of user ids into redis store ids"""
return [self.stream_id(user_id) for user_id in user_ids]
def get_statuses_for_user ( self , user ) :
"""Given a user, what statuses should they see on this stream"""
def get_statuses_for_user(self, user): # pylint: disable=no-self-use
"""given a user, what statuses should they see on this stream"""
return models.Status.privacy_filter(
user,
privacy_levels=["public", "unlisted", "followers"],
@@ -155,91 +163,104 @@ class ActivityStream (RedisStore) :
class HomeStream(ActivityStream):
"""Users you follow"""
"""users you follow"""
key = "home"
def get_audience(self, status):
audience = super()._get_audience(status)
# If the user is following the author
# if the user is following the author
audience = audience.filter(following=status.user).values_list("id", flat=True)
# If the user is the post's author
status_author = models . User . objects
. filter ( is_active = True , local = True , id = status . user . id )
. values_list ( "id" , flat = True )
# if the user is the post's author
status_author = models.User.objects.filter(
is_active=True, local=True, id=status.user.id
).values_list("id", flat=True)
return list(set(audience) | set(status_author))
def get_statuses_for_user(self, user):
return models.Status.privacy_filter(
user,
privacy_levels=["public", "unlisted", "followers"],
) . exclude ( ~ Q ( # Remove everything except
Q ( user__followers = user ) | # User following
Q ( user = user ) | # Is self
Q ( mention_users = user ) # Mentions user
) )
).exclude(
~Q( # remove everything except
Q(user__followers=user) # user following
| Q(user=user) # is self
| Q(mention_users=user) # mentions user
),
)
class LocalStream(ActivityStream):
"""Users you follow"""
"""users you follow"""
key = "local"
def get_audience(self, status):
# This stream wants no part in non-public statuses
# this stream wants no part in non-public statuses
if status.privacy != "public" or not status.user.local:
return []
return super().get_audience(status)
def get_statuses_for_user(self, user):
# All public statuses by a local user
return models . Status
. privacy_filter ( user , privacy_levels = [ "public" ] )
. filter ( user__local = True )
# all public statuses by a local user
return models.Status.privacy_filter(
user,
privacy_levels=["public"],
).filter(user__local=True)
class BooksStream(ActivityStream):
"""Books on your shelves"""
"""books on your shelves"""
key = "books"
def _get_audience(self, status):
"""Anyone with the mentioned book on their shelves"""
work = status . book . parent_work if hasattr ( status , "book" )
"""anyone with the mentioned book on their shelves"""
work = (
status.book.parent_work
if hasattr(status, "book")
else status.mention_books.first().parent_work
)
audience = super()._get_audience(status)
return audience.filter(shelfbook__book__parent_work=work)
def get_audience(self, status):
# Only show public statuses on the books feed, and only statuses that mention books
if (
status . privacy != "public" or
not ( status . mention_books . exists () or hasattr ( status , "book" ) )
# only show public statuses on the books feed,
# and only statuses that mention books
if status.privacy != "public" or not (
status.mention_books.exists() or hasattr(status, "book")
):
return []
return super().get_audience(status)
def get_statuses_for_user(self, user):
"""Any public status that mentions the user's books"""
books = user . shelfbook_set
. values_list ( "book__parent_work__id" , flat = True )
. distinct ()
return models . Status
. privacy_filter ( user , privacy_levels = [ "public" ] )
"""any public status that mentions the user's books"""
books = user.shelfbook_set.values_list(
"book__parent_work__id", flat=True
).distinct()
return (
models.Status.privacy_filter(
user,
privacy_levels=["public"],
)
.filter(
Q ( comment__book__parent_work__id__in = books ) |
Q ( quotation__book__parent_work__id__in = books ) |
Q ( review__book__parent_work__id__in = books ) |
Q ( mention_books__parent_work__id__in = books )
Q(comment__book__parent_work__id__in=books)
| Q(quotation__book__parent_work__id__in=books)
| Q(review__book__parent_work__id__in=books)
| Q(mention_books__parent_work__id__in=books)
)
.distinct()
)
def add_book_statuses(self, user, book):
"""Add statuses about a book to a user's feed"""
"""add statuses about a book to a user's feed"""
work = book.parent_work
statuses = models . Status . privacy_filter ( user , privacy_levels = [ "public" ] )
statuses = models.Status.privacy_filter(
user,
privacy_levels=["public"],
)
book_comments = statuses.filter(Q(comment__book__parent_work=work))
book_quotations = statuses.filter(Q(quotation__book__parent_work=work))
@@ -252,9 +273,12 @@ class BooksStream (ActivityStream) :
self.bulk_add_objects_to_store(book_mentions, self.stream_id(user.id))
def remove_book_statuses(self, user, book):
"""Add statuses about a book to a user's feed"""
"""add statuses about a book to a user's feed"""
work = book.parent_work
statuses = models . Status . privacy_filter ( user , privacy_levels = [ "public" ] )
statuses = models.Status.privacy_filter(
user,
privacy_levels=["public"],
)
book_comments = statuses.filter(Q(comment__book__parent_work=work))
book_quotations = statuses.filter(Q(quotation__book__parent_work=work))
@@ -267,7 +291,7 @@ class BooksStream (ActivityStream) :
self.bulk_remove_objects_from_store(book_mentions, self.stream_id(user.id))
# Determine which streams are enabled in settings.py
# determine which streams are enabled in settings.py
streams = {
"home": HomeStream(),
"local": LocalStream(),
@@ -276,9 +300,10 @@ streams = {
@receiver(signals.post_save)
# pylint: disable=unused-argument
def add_status_on_create(sender, instance, created, *args, **kwargs):
"""Add newly created statuses to activity feeds"""
# We're only interested in new statuses
"""add newly created statuses to activity feeds"""
# we're only interested in new statuses
if not issubclass(sender, models.Status):
return
@@ -286,32 +311,33 @@ def add_status_on_create ( sender , instance , created , * args , ** kwargs ) :
remove_status_task.delay(instance.id)
return
# We don't want to create multiple add_status_tasks for each status, and because the transactions are atomic,
# on_commit won't run until the status is ready to add.
# We don't want to create multiple add_status_tasks for each status, and because
# the transactions are atomic, on_commit won't run until the status is ready to add.
if not created:
return
# When creating new things, gotta wait on the transaction
transaction . on_commit ( lambda : add_status_on_create_command ( sender , instance , created ) )
# when creating new things, gotta wait on the transaction
transaction.on_commit(
lambda: add_status_on_create_command(sender, instance, created)
)
def add_status_on_create_command(sender, instance, created):
"""Runs this code only after the database commit completes"""
# Boosts trigger 'saves" twice, so don't bother duplicating the task
"""runs this code only after the database commit completes"""
# boosts trigger 'saves" twice, so don't bother duplicating the task
if sender == models.Boost and not created:
return
priority = STREAMS
# Check if this is an old status, de-prioritize if so
# check if this is an old status, de-prioritize if so
# (this will happen if federation is very slow, or, more expectedly, on csv import)
if (
instance . published_date < timezone . now () - timedelta ( days = 1 ) or
instance.created_date < instance.published_date - timedelta(days=1)
) :
# A backdated status from a local user is an import, don't add it
if instance.published_date < timezone.now() - timedelta(
days=1
) or instance.created_date < instance.published_date - timedelta(days=1):
# a backdated status from a local user is an import, don't add it
if instance.user.local:
return
# An out of date remote status is a low priority but should be added
# an out of date remote status is a low priority but should be added
priority = IMPORT_TRIGGERED
add_status_task.apply_async(
@@ -325,57 +351,56 @@ def add_status_on_create_command ( sender , instance , created ) :
@receiver(signals.post_delete, sender=models.Boost)
# pylint: disable=unused-argument
def remove_boost_on_delete(sender, instance, *args, **kwargs):
"""Boosts are deleted"""
# Remove the boost
"""boosts are deleted"""
# remove the boost
remove_status_task.delay(instance.id)
# Re-add the original status
# re-add the original status
add_status_task.delay(instance.boosted_status.id)
@receiver(signals.post_save, sender=models.UserFollows)
# pylint: disable=unused-argument
def add_statuses_on_follow(sender, instance, created, *args, **kwargs):
"""Add a newly followed user's statuses to feeds"""
"""add a newly followed user's statuses to feeds"""
if not created or not instance.user_subject.local:
return
add_user_statuses_task.delay(
instance . user_subject . id ,
instance . user_object . id ,
stream_list = [ "home" ] ,
instance.user_subject.id, instance.user_object.id, stream_list=["home"]
)
@receiver(signals.post_delete, sender=models.UserFollows)
# pylint: disable=unused-argument
def remove_statuses_on_unfollow(sender, instance, *args, **kwargs):
"""Remove statuses from a feed on unfollow"""
"""remove statuses from a feed on unfollow"""
if not instance.user_subject.local:
return
remove_user_statuses_task.delay(
instance . user_subject . id ,
instance . user_object . id ,
stream_list = [ "home" ] ,
instance.user_subject.id, instance.user_object.id, stream_list=["home"]
)
@receiver(signals.post_save, sender=models.UserBlocks)
# pylint: disable=unused-argument
def remove_statuses_on_block(sender, instance, *args, **kwargs):
"""Remove statuses from all feeds on block"""
# Blocks apply ot all feeds
"""remove statuses from all feeds on block"""
# blocks apply ot all feeds
if instance.user_subject.local:
remove_user_statuses_task.delay(
instance . user_subject . id ,
instance . user_object . id ,
instance.user_subject.id, instance.user_object.id
)
# And in both directions
# and in both directions
if instance.user_object.local:
remove_user_statuses_task.delay(
instance . user_object . id ,
instance . user_subject . id ,
instance.user_object.id, instance.user_subject.id
)
@receiver(signals.post_delete, sender=models.UserBlocks)
# pylint: disable=unused-argument
def add_statuses_on_unblock(sender, instance, *args, **kwargs):
"""add statuses back to all feeds on unblock"""
# make sure there isn't a block in the other direction
@@ -405,6 +430,7 @@ def add_statuses_on_unblock(sender, instance, *args, **kwargs):
@receiver(signals.post_save, sender=models.User)
# pylint: disable=unused-argument
def populate_streams_on_account_create(sender, instance, created, *args, **kwargs):
"""build a user's feeds when they join"""
if not created or not instance.local:
@@ -421,6 +447,7 @@ def populate_streams_on_account_create_command(instance_id):
@receiver(signals.pre_save, sender=models.ShelfBook)
# pylint: disable=unused-argument
def add_statuses_on_shelve(sender, instance, *args, **kwargs):
"""update books stream when user shelves a book"""
if not instance.user.local:
@@ -436,6 +463,7 @@ def add_statuses_on_shelve(sender, instance, *args, **kwargs):
@receiver(signals.post_delete, sender=models.ShelfBook)
# pylint: disable=unused-argument
def remove_statuses_on_unshelve(sender, instance, *args, **kwargs):
"""update books stream when user unshelves a book"""
if not instance.user.local:

View file

@@ -1,15 +1,12 @@
"""Do further startup configuration and initialization"""
import logging
import os
import urllib
import logging
from bookwyrm import settings
from django.apps import AppConfig
from bookwyrm import settings
logger = logging.getLogger(__name__)
@@ -26,7 +23,7 @@ def download_file ( url , destination) :
logger.error("Failed to download file %s: %s", url, err)
except OSError as err:
logger.error("Couldn't open font file %s for writing: %s", destination, err)
except Exception as err :
except Exception as err: # pylint:disable=broad-except
logger.error("Unknown error in file download: %s", err)
@@ -37,7 +34,7 @@ class BookwyrmConfig (AppConfig) :
verbose_name = "BookWyrm"
def ready(self):
"""Set up preview image files, if desired"""
"""set up OTLP and preview image files, if desired"""
if settings.ENABLE_PREVIEW_IMAGES and settings.FONTS:
# Download any fonts that we don't have yet
logger.debug("Downloading fonts..")

View file

@@ -1,17 +1,17 @@
""" Bookwyrm settings and configuration """
""" bookwyrm settings and configuration """
import os
import requests
from django . core . exceptions import ImproperlyConfigured
from django . utils . translation import gettext_lazy as _
from environs import Env
from typing import AnyStr
from environs import Env
import requests
from django.utils.translation import gettext_lazy as _
from django.core.exceptions import ImproperlyConfigured
# pylint: disable=line-too-long
env = Env()
env.read_env("/etc/bookwyrm/config.env", recurse=False)
DOMAIN = env("DOMAIN")
@@ -29,12 +29,13 @@ RELEASE_API = env (
PAGE_LENGTH = env.int("PAGE_LENGTH", 15)
DEFAULT_LANGUAGE = env("DEFAULT_LANGUAGE", "English")
# TODO: Extend maximum age to 1 year once termination of active sessions is implemented (see bookwyrm-social#2278, bookwyrm-social#3082).
# TODO: extend maximum age to 1 year once termination of active sessions
# is implemented (see bookwyrm-social#2278, bookwyrm-social#3082).
SESSION_COOKIE_AGE = env.int("SESSION_COOKIE_AGE", 3600 * 24 * 30) # 1 month
JS_CACHE = "8a89cad7"
# Email
# email
EMAIL_BACKEND = env("EMAIL_BACKEND", "django.core.mail.backends.smtp.EmailBackend")
EMAIL_HOST = env("EMAIL_HOST")
EMAIL_PORT = env.int("EMAIL_PORT", 587)
@@ -48,7 +49,9 @@ EMAIL_SENDER = f"{EMAIL_SENDER_NAME}@{EMAIL_SENDER_DOMAIN}"
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR: AnyStr = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOCALE_PATHS = [ os . path . join ( BASE_DIR , "locale" ) ]
LOCALE_PATHS = [
os.path.join(BASE_DIR, "locale"),
]
LANGUAGE_COOKIE_NAME = env.str("LANGUAGE_COOKIE_NAME", "django_language")
STATIC_ROOT = os.path.join(BASE_DIR, env("STATIC_ROOT", "static"))
@@ -77,11 +80,11 @@ FONT_DIR = os . path . join ( STATIC_ROOT , "fonts" )
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: Don't run with debug turned on in production
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", True)
USE_HTTPS = env.bool("USE_HTTPS", not DEBUG)
# SECURITY WARNING: Keep the secret key used in production secret
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env("SECRET_KEY")
if not DEBUG and SECRET_KEY == "7(2w1sedok=aznpq)ta1mc4i%4h=xx@hxwx*o57ctsuml0x%fr":
raise ImproperlyConfigured("You must change the SECRET_KEY env variable")
@@ -145,25 +148,39 @@ TEMPLATES = [
LOG_LEVEL = env("LOG_LEVEL", "INFO").upper()
# Override aspects of the default handler to our taste
# See https://docs.djangoproject.com/en/3.2/topics/logging/#default-logging-configuration for a reference to the defaults we're overriding
# It seems that in order to override anything you have to include its entire dependency tree (handlers and filters) which makes this a bit verbose
# See https://docs.djangoproject.com/en/3.2/topics/logging/#default-logging-configuration
# for a reference to the defaults we're overriding
#
# It seems that in order to override anything you have to include its
# entire dependency tree (handlers and filters) which makes this a
# bit verbose
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"filters": {
# These are copied from the default configuration, required for implementing mail_admins below
"require_debug_false" : { "()" : "django.utils.log.RequireDebugFalse" } ,
"require_debug_true": { "()" : "django.utils.log.RequireDebugTrue" } ,
"ignore_missing_variable" : { "()" : "bookwyrm.utils.log.IgnoreVariableDoesNotExist" } ,
# These are copied from the default configuration, required for
# implementing mail_admins below
"require_debug_false": {
"()": "django.utils.log.RequireDebugFalse",
},
"require_debug_true": {
"()": "django.utils.log.RequireDebugTrue",
},
"ignore_missing_variable": {
"()": "bookwyrm.utils.log.IgnoreVariableDoesNotExist",
},
},
"handlers": {
# Overrides the default handler to make it log to console regardless of the DEBUG setting (default is to not log to console if DEBUG=False)
# Overrides the default handler to make it log to console
# regardless of the DEBUG setting (default is to not log to
# console if DEBUG=False)
"console": {
"level": LOG_LEVEL,
"filters": ["ignore_missing_variable"],
"class": "logging.StreamHandler",
},
# This is copied as-is from the default logger, and is required for the Django section below
# This is copied as-is from the default logger, and is
# required for the django section below
"mail_admins": {
"level": "ERROR",
"filters": ["require_debug_false"],
@@ -171,13 +188,16 @@ LOGGING = {
},
},
"loggers": {
# Install our new console handler for Django's logger, and override the log level while we're at it
# Install our new console handler for Django's logger, and
# override the log level while we're at it
"django": {
"handlers": ["console", "mail_admins"],
"level": LOG_LEVEL,
},
"django.utils.autoreload" : { "level" : "INFO" } ,
# Add a Bookwyrm-specific logger
"django.utils.autoreload": {
"level": "INFO",
},
# Add a bookwyrm-specific logger
"bookwyrm": {
"handlers": ["console"],
"level": LOG_LEVEL,
@@ -192,16 +212,16 @@ STATICFILES_FINDERS = [
]
SASS_PROCESSOR_INCLUDE_FILE_PATTERN = r"^.+\.[s]{0,1}(?:a|c)ss$"
# When debug is disabled, make sure to compile themes once with `./bw-dev compile_themes`
# when debug is disabled, make sure to compile themes once with `./bw-dev compile_themes`
SASS_PROCESSOR_ENABLED = DEBUG
# Minify CSS in production but not dev
# minify css is production but not dev
if not DEBUG:
SASS_OUTPUT_STYLE = "compressed"
WSGI_APPLICATION = "bookwyrm.wsgi.application"
# Redis/activity streams settings
# redis/activity streams settings
REDIS_ACTIVITY_HOST = env("REDIS_ACTIVITY_HOST", "localhost")
REDIS_ACTIVITY_PORT = env.int("REDIS_ACTIVITY_PORT", 6379)
REDIS_ACTIVITY_PASSWORD = requests.utils.quote(env("REDIS_ACTIVITY_PASSWORD", ""))
@@ -218,9 +238,9 @@ STREAMS = [
]
# Search configuration
# Total time in seconds that the instance will spend searching connectors
# total time in seconds that the instance will spend searching connectors
SEARCH_TIMEOUT = env.int("SEARCH_TIMEOUT", 8)
# Timeout for a query to an individual connector
# timeout for a query to an individual connector
QUERY_TIMEOUT = env.int("INTERACTIVE_QUERY_TIMEOUT", env.int("QUERY_TIMEOUT", 5))
# Redis cache backend
@@ -271,10 +291,18 @@ AUTH_USER_MODEL = "bookwyrm.User"
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{ "NAME" : "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" } ,
{ "NAME" : "django.contrib.auth.password_validation.MinimumLengthValidator" } ,
{ "NAME" : "django.contrib.auth.password_validation.CommonPasswordValidator" } ,
{ "NAME" : "django.contrib.auth.password_validation.NumericPasswordValidator" } ,
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
@@ -403,11 +431,15 @@ if USE_S3 :
# Content Security Policy
CSP_DEFAULT_SRC = [
"'self'",
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}" if AWS_S3_CUSTOM_DOMAIN else None ,
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
if AWS_S3_CUSTOM_DOMAIN
else None,
] + CSP_ADDITIONAL_HOSTS
CSP_SCRIPT_SRC = [
"'self'",
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}" if AWS_S3_CUSTOM_DOMAIN else None ,
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
if AWS_S3_CUSTOM_DOMAIN
else None,
] + CSP_ADDITIONAL_HOSTS
elif USE_AZURE:
# Azure settings
@@ -431,16 +463,20 @@ elif USE_AZURE :
},
},
"exports": {
"BACKEND" : None , # Not implemented yet
"BACKEND": None, # not implemented yet
},
}
# Azure Static settings
STATIC_LOCATION = "static"
STATIC_URL = ( f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{STATIC_LOCATION}/" )
STATIC_URL = (
f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{STATIC_LOCATION}/"
)
STATIC_FULL_URL = STATIC_URL
# Azure Media settings
MEDIA_LOCATION = "images"
MEDIA_URL = ( f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{MEDIA_LOCATION}/" )
MEDIA_URL = (
f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{MEDIA_LOCATION}/"
)
MEDIA_FULL_URL = MEDIA_URL
# Content Security Policy
CSP_DEFAULT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
@@ -456,7 +492,9 @@ else :
},
"exports": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
"OPTIONS" : { "location" : "exports" } ,
"OPTIONS": {
"location": "exports",
},
},
}
# Static settings
@@ -471,6 +509,11 @@ else :
CSP_INCLUDE_NONCE_IN = ["script-src"]
OTEL_EXPORTER_OTLP_ENDPOINT = env("OTEL_EXPORTER_OTLP_ENDPOINT", None)
OTEL_EXPORTER_OTLP_HEADERS = env("OTEL_EXPORTER_OTLP_HEADERS", None)
OTEL_SERVICE_NAME = env("OTEL_SERVICE_NAME", None)
OTEL_EXPORTER_CONSOLE = env.bool("OTEL_EXPORTER_CONSOLE", False)
TWO_FACTOR_LOGIN_MAX_SECONDS = env.int("TWO_FACTOR_LOGIN_MAX_SECONDS", 60)
TWO_FACTOR_LOGIN_VALIDITY_WINDOW = env.int("TWO_FACTOR_LOGIN_VALIDITY_WINDOW", 2)
@@ -478,9 +521,12 @@ HTTP_X_FORWARDED_PROTO = env . bool ( "SECURE_PROXY_SSL_HEADER" , False )
if HTTP_X_FORWARDED_PROTO:
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
# Instance Actor for signing GET requests to "secure mode" Mastodon servers.
# Do not change this setting unless you already have an existing user with the same username - in which case you should change it
# Instance Actor for signing GET requests to "secure mode"
# Mastodon servers.
# Do not change this setting unless you already have an existing
# user with the same username - in which case you should change it!
INSTANCE_ACTOR_USERNAME = "bookwyrm.instance.actor"
# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env (note the difference in variable names).
# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env
# (note the difference in variable names).
DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) << 20