
Revert formatting changes

Reinout Meliesie 2025-03-11 10:25:28 +01:00
parent 64ea602f02
commit 1e0d547db5
Signed by: zedfrigg
GPG key ID: 3AFCC06481308BC6
3 changed files with 696 additions and 625 deletions

bookwyrm/activitystreams.py

@@ -1,381 +1,406 @@
""" access the activity streams stored in redis """
from datetime import timedelta
from django.dispatch import receiver
from django.db import transaction
from django.db.models import signals, Q
from django.utils import timezone

from bookwyrm import models
from bookwyrm.redis_store import RedisStore, r
from bookwyrm.tasks import app, STREAMS, IMPORT_TRIGGERED


class ActivityStream(RedisStore):
    """a category of activity stream (like home, local, books)"""

    def stream_id(self, user_id):
        """the redis key for this user's instance of this stream"""
        return f"{user_id}-{self.key}"

    def unread_id(self, user_id):
        """the redis key for this user's unread count for this stream"""
        stream_id = self.stream_id(user_id)
        return f"{stream_id}-unread"

    def unread_by_status_type_id(self, user_id):
        """the redis key for this user's unread count by status type for this stream"""
        stream_id = self.stream_id(user_id)
        return f"{stream_id}-unread-by-type"
    def get_rank(self, obj):
        """statuses are sorted by date published"""
        return obj.published_date.timestamp()

    def add_status(self, status, increment_unread=False):
        """add a status to users' feeds"""
        audience = self.get_audience(status)
        # the pipeline contains all the add-to-stream activities
        pipeline = self.add_object_to_stores(
            status, self.get_stores_for_users(audience), execute=False
        )

        if increment_unread:
            for user_id in audience:
                # add to the unread status count
                pipeline.incr(self.unread_id(user_id))
                # add to the unread status count for status type
                pipeline.hincrby(
                    self.unread_by_status_type_id(user_id), get_status_type(status), 1
                )

        # and go!
        pipeline.execute()

    def add_user_statuses(self, viewer, user):
        """add a user's statuses to another user's feed"""
        # only add the statuses that the viewer should be able to see (ie, not dms)
        statuses = models.Status.privacy_filter(viewer).filter(user=user)
        self.bulk_add_objects_to_store(statuses, self.stream_id(viewer.id))

    def remove_user_statuses(self, viewer, user):
        """remove a user's statuses from another user's feed"""
        # remove all so that followers-only statuses are removed
        statuses = user.status_set.all()
        self.bulk_remove_objects_from_store(statuses, self.stream_id(viewer.id))

    def get_activity_stream(self, user):
        """load the statuses to be displayed"""
        # clear unreads for this feed
        r.set(self.unread_id(user.id), 0)
        r.delete(self.unread_by_status_type_id(user.id))

        statuses = self.get_store(self.stream_id(user.id))
        return (
            models.Status.objects.select_subclasses()
            .filter(id__in=statuses)
            .select_related(
                "user",
                "reply_parent",
                "comment__book",
                "review__book",
                "quotation__book",
            )
            .prefetch_related("mention_books", "mention_users")
            .order_by("-published_date")
        )

    def get_unread_count(self, user):
        """get the unread status count for this user's feed"""
        return int(r.get(self.unread_id(user.id)) or 0)

    def get_unread_count_by_status_type(self, user):
        """get the unread status count for this user's feed's status types"""
        status_types = r.hgetall(self.unread_by_status_type_id(user.id))
        return {
            str(key.decode("utf-8")): int(value) or 0
            for key, value in status_types.items()
        }
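For illustration: the per-type counts live in a redis hash keyed by whatever get_status_type(status) returns, so a user with two unread comments and one unread review would get back a plain dict (a sketch, assuming "comment" and "review" as the type labels):

    stream.get_unread_count_by_status_type(user)  # -> {"comment": 2, "review": 1}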
    def populate_streams(self, user):
        """go from zero to a timeline"""
        self.populate_store(self.stream_id(user.id))

    def _get_audience(self, status):  # pylint: disable=no-self-use
        """given a status, what users should see it, excluding the author"""
        # direct messages don't appear in feeds, direct comments/reviews/etc do
        if status.privacy == "direct" and status.status_type == "Note":
            return models.User.objects.none()

        # everybody who could plausibly see this status
        audience = models.User.objects.filter(
            is_active=True,
            local=True,  # we only create feeds for users of this instance
        ).exclude(
            Q(id__in=status.user.blocks.all()) | Q(blocks=status.user)  # not blocked
        )

        # only visible to the poster and mentioned users
        if status.privacy == "direct":
            audience = audience.filter(
                Q(id__in=status.mention_users.all())  # if the user is mentioned
            )
        # don't show replies to statuses the user can't see
        elif status.reply_parent and status.reply_parent.privacy == "followers":
            audience = audience.filter(
                Q(id=status.reply_parent.user.id)  # if the user is the OG author
                | (
                    Q(following=status.user) & Q(following=status.reply_parent.user)
                )  # if the user is following both authors
            )
        # only visible to the poster's followers and tagged users
        elif status.privacy == "followers":
            audience = audience.filter(
                Q(following=status.user)  # if the user is following the author
            )
        return audience.distinct("id")

    def get_audience(self, status):
        """given a status, what users should see it"""
        audience = self._get_audience(status).values_list("id", flat=True)
        status_author = models.User.objects.filter(
            is_active=True, local=True, id=status.user.id
        ).values_list("id", flat=True)
        return list(set(audience) | set(status_author))

    def get_stores_for_users(self, user_ids):
        """convert a list of user ids into redis store ids"""
        return [self.stream_id(user_id) for user_id in user_ids]

    def get_statuses_for_user(self, user):  # pylint: disable=no-self-use
        """given a user, what statuses should they see on this stream"""
        return models.Status.privacy_filter(
            user,
            privacy_levels=["public", "unlisted", "followers"],
        )

    def get_objects_for_store(self, store):
        user = models.User.objects.get(id=store.split("-")[0])
        return self.get_statuses_for_user(user)


class HomeStream(ActivityStream):
    """statuses from users you follow"""

    key = "home"

    def get_audience(self, status):
        audience = super()._get_audience(status)
        # if the user is following the author
        audience = audience.filter(following=status.user).values_list("id", flat=True)
        # if the user is the post's author
        status_author = models.User.objects.filter(
            is_active=True, local=True, id=status.user.id
        ).values_list("id", flat=True)
        return list(set(audience) | set(status_author))

    def get_statuses_for_user(self, user):
        return models.Status.privacy_filter(
            user,
            privacy_levels=["public", "unlisted", "followers"],
        ).exclude(
            ~Q(  # remove everything except
                Q(user__followers=user)  # user following
                | Q(user=user)  # is self
                | Q(mention_users=user)  # mentions user
            ),
        )


class LocalStream(ActivityStream):
    """public statuses from users on this instance"""

    key = "local"

    def get_audience(self, status):
        # this stream wants no part in non-public statuses
        if status.privacy != "public" or not status.user.local:
            return []
        return super().get_audience(status)

    def get_statuses_for_user(self, user):
        # all public statuses by a local user
        return models.Status.privacy_filter(
            user,
            privacy_levels=["public"],
        ).filter(user__local=True)


class BooksStream(ActivityStream):
    """books on your shelves"""

    key = "books"

    def _get_audience(self, status):
        """anyone with the mentioned book on their shelves"""
        work = (
            status.book.parent_work
            if hasattr(status, "book")
            else status.mention_books.first().parent_work
        )

        audience = super()._get_audience(status)
        return audience.filter(shelfbook__book__parent_work=work)

    def get_audience(self, status):
        # only show public statuses on the books feed,
        # and only statuses that mention books
        if status.privacy != "public" or not (
            status.mention_books.exists() or hasattr(status, "book")
        ):
            return []

        return super().get_audience(status)

    def get_statuses_for_user(self, user):
        """any public status that mentions the user's books"""
        books = user.shelfbook_set.values_list(
            "book__parent_work__id", flat=True
        ).distinct()
        return (
            models.Status.privacy_filter(
                user,
                privacy_levels=["public"],
            )
            .filter(
                Q(comment__book__parent_work__id__in=books)
                | Q(quotation__book__parent_work__id__in=books)
                | Q(review__book__parent_work__id__in=books)
                | Q(mention_books__parent_work__id__in=books)
            )
            .distinct()
        )

    def add_book_statuses(self, user, book):
        """add statuses about a book to a user's feed"""
        work = book.parent_work
        statuses = models.Status.privacy_filter(
            user,
            privacy_levels=["public"],
        )

        book_comments = statuses.filter(Q(comment__book__parent_work=work))
        book_quotations = statuses.filter(Q(quotation__book__parent_work=work))
        book_reviews = statuses.filter(Q(review__book__parent_work=work))
        book_mentions = statuses.filter(Q(mention_books__parent_work=work))

        self.bulk_add_objects_to_store(book_comments, self.stream_id(user.id))
        self.bulk_add_objects_to_store(book_quotations, self.stream_id(user.id))
        self.bulk_add_objects_to_store(book_reviews, self.stream_id(user.id))
        self.bulk_add_objects_to_store(book_mentions, self.stream_id(user.id))

    def remove_book_statuses(self, user, book):
        """remove statuses about a book from a user's feed"""
        work = book.parent_work
        statuses = models.Status.privacy_filter(
            user,
            privacy_levels=["public"],
        )

        book_comments = statuses.filter(Q(comment__book__parent_work=work))
        book_quotations = statuses.filter(Q(quotation__book__parent_work=work))
        book_reviews = statuses.filter(Q(review__book__parent_work=work))
        book_mentions = statuses.filter(Q(mention_books__parent_work=work))

        self.bulk_remove_objects_from_store(book_comments, self.stream_id(user.id))
        self.bulk_remove_objects_from_store(book_quotations, self.stream_id(user.id))
        self.bulk_remove_objects_from_store(book_reviews, self.stream_id(user.id))
        self.bulk_remove_objects_from_store(book_mentions, self.stream_id(user.id))


# determine which streams are enabled in settings.py
streams = {
    "home": HomeStream(),
    "local": LocalStream(),
    "books": BooksStream(),
}
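A sketch of how the registry above gets used, assuming a logged-in User instance named viewer:

    home = streams["home"]
    unread = home.get_unread_count(viewer)  # plain int from redis
    timeline = home.get_activity_stream(viewer)  # queryset; also clears the unread counters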
@receiver(signals.post_save)
# pylint: disable=unused-argument
def add_status_on_create(sender, instance, created, *args, **kwargs):
    """add newly created statuses to activity feeds"""
    # we're only interested in new statuses
    if not issubclass(sender, models.Status):
        return

    if instance.deleted:
        remove_status_task.delay(instance.id)
        return

    # We don't want to create multiple add_status_tasks for each status, and because
    # the transactions are atomic, on_commit won't run until the status is ready to add.
    if not created:
        return

    # when creating new things, gotta wait on the transaction
    transaction.on_commit(
        lambda: add_status_on_create_command(sender, instance, created)
    )


def add_status_on_create_command(sender, instance, created):
    """runs this code only after the database commit completes"""
    # boosts trigger "saves" twice, so don't bother duplicating the task
    if sender == models.Boost and not created:
        return

    priority = STREAMS
    # check if this is an old status, de-prioritize if so
    # (this will happen if federation is very slow, or, more expectedly, on csv import)
    if instance.published_date < timezone.now() - timedelta(
        days=1
    ) or instance.created_date < instance.published_date - timedelta(days=1):
        # a backdated status from a local user is an import, don't add it
        if instance.user.local:
            return
        # an out of date remote status is a low priority but should be added
        priority = IMPORT_TRIGGERED

    add_status_task.apply_async(
        args=(instance.id,),
        kwargs={"increment_unread": created},
        queue=priority,
    )

    if sender == models.Boost:
        handle_boost_task.delay(instance.id)


@receiver(signals.post_delete, sender=models.Boost)
# pylint: disable=unused-argument
def remove_boost_on_delete(sender, instance, *args, **kwargs):
    """boosts are deleted"""
    # remove the boost
    remove_status_task.delay(instance.id)
    # re-add the original status
    add_status_task.delay(instance.boosted_status.id)


@receiver(signals.post_save, sender=models.UserFollows)
# pylint: disable=unused-argument
def add_statuses_on_follow(sender, instance, created, *args, **kwargs):
    """add a newly followed user's statuses to feeds"""
    if not created or not instance.user_subject.local:
        return
    add_user_statuses_task.delay(
        instance.user_subject.id, instance.user_object.id, stream_list=["home"]
    )


@receiver(signals.post_delete, sender=models.UserFollows)
# pylint: disable=unused-argument
def remove_statuses_on_unfollow(sender, instance, *args, **kwargs):
    """remove statuses from a feed on unfollow"""
    if not instance.user_subject.local:
        return
    remove_user_statuses_task.delay(
        instance.user_subject.id, instance.user_object.id, stream_list=["home"]
    )


@receiver(signals.post_save, sender=models.UserBlocks)
# pylint: disable=unused-argument
def remove_statuses_on_block(sender, instance, *args, **kwargs):
    """remove statuses from all feeds on block"""
    # blocks apply to all feeds
    if instance.user_subject.local:
        remove_user_statuses_task.delay(
            instance.user_subject.id, instance.user_object.id
        )

    # and in both directions
    if instance.user_object.local:
        remove_user_statuses_task.delay(
            instance.user_object.id, instance.user_subject.id
        )


@receiver(signals.post_delete, sender=models.UserBlocks)
# pylint: disable=unused-argument
def add_statuses_on_unblock(sender, instance, *args, **kwargs):
    """add statuses back to all feeds on unblock"""
    # make sure there isn't a block in the other direction

@@ -405,6 +430,7 @@ def add_statuses_on_unblock(sender, instance, *args, **kwargs):
@receiver(signals.post_save, sender=models.User)
# pylint: disable=unused-argument
def populate_streams_on_account_create(sender, instance, created, *args, **kwargs):
    """build a user's feeds when they join"""
    if not created or not instance.local:

@@ -421,6 +447,7 @@ def populate_streams_on_account_create_command(instance_id):
@receiver(signals.pre_save, sender=models.ShelfBook)
# pylint: disable=unused-argument
def add_statuses_on_shelve(sender, instance, *args, **kwargs):
    """update books stream when user shelves a book"""
    if not instance.user.local:

@@ -436,6 +463,7 @@ def add_statuses_on_shelve(sender, instance, *args, **kwargs):
@receiver(signals.post_delete, sender=models.ShelfBook)
# pylint: disable=unused-argument
def remove_statuses_on_unshelve(sender, instance, *args, **kwargs):
    """update books stream when user unshelves a book"""
    if not instance.user.local:

bookwyrm/apps.py

@@ -1,51 +1,48 @@
"""Do further startup configuration and initialization"""
import logging
import os
import urllib

from django.apps import AppConfig

from bookwyrm import settings

logger = logging.getLogger(__name__)


def download_file(url, destination):
    """Downloads a file to the given path"""
    try:
        # Ensure our destination directory exists
        os.makedirs(os.path.dirname(destination), exist_ok=True)
        with urllib.request.urlopen(url) as stream:
            with open(destination, "b+w") as outfile:
                outfile.write(stream.read())
    except (urllib.error.HTTPError, urllib.error.URLError) as err:
        logger.error("Failed to download file %s: %s", url, err)
    except OSError as err:
        logger.error("Couldn't open font file %s for writing: %s", destination, err)
    except Exception as err:  # pylint:disable=broad-except
        logger.error("Unknown error in file download: %s", err)


class BookwyrmConfig(AppConfig):
    """Handles additional configuration"""

    name = "bookwyrm"
    verbose_name = "BookWyrm"

    def ready(self):
        """set up OTLP and preview image files, if desired"""
        if settings.ENABLE_PREVIEW_IMAGES and settings.FONTS:
            # Download any fonts that we don't have yet
            logger.debug("Downloading fonts..")
            for name, config in settings.FONTS.items():
                font_path = os.path.join(
                    settings.FONT_DIR, config["directory"], config["filename"]
                )

                if "url" in config and not os.path.exists(font_path):
                    logger.info("Just a sec, downloading %s", name)
                    download_file(config["url"], font_path)
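For illustration, one iteration of the loop above is equivalent to a call like this (URL and destination are made up):

    download_file(
        "https://example.com/fonts/SomeFont.ttf",
        "/app/static/fonts/some_font/SomeFont.ttf",
    )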

bookwyrm/settings.py

@@ -1,249 +1,269 @@
""" bookwyrm settings and configuration """
import os
from typing import AnyStr

from environs import Env

import requests
from django.utils.translation import gettext_lazy as _
from django.core.exceptions import ImproperlyConfigured


# pylint: disable=line-too-long

env = Env()
env.read_env("/etc/bookwyrm/config.env", recurse=False)
DOMAIN = env("DOMAIN")

with open("VERSION", encoding="utf-8") as f:
    version = f.read()
    version = version.replace("\n", "")

VERSION = version

RELEASE_API = env(
    "RELEASE_API",
    "https://api.github.com/repos/bookwyrm-social/bookwyrm/releases/latest",
)

PAGE_LENGTH = env.int("PAGE_LENGTH", 15)
DEFAULT_LANGUAGE = env("DEFAULT_LANGUAGE", "English")

# TODO: extend maximum age to 1 year once termination of active sessions
# is implemented (see bookwyrm-social#2278, bookwyrm-social#3082).
SESSION_COOKIE_AGE = env.int("SESSION_COOKIE_AGE", 3600 * 24 * 30)  # 1 month

JS_CACHE = "8a89cad7"

# email
EMAIL_BACKEND = env("EMAIL_BACKEND", "django.core.mail.backends.smtp.EmailBackend")
EMAIL_HOST = env("EMAIL_HOST")
EMAIL_PORT = env.int("EMAIL_PORT", 587)
EMAIL_HOST_USER = env("EMAIL_HOST_USER")
EMAIL_HOST_PASSWORD = env("EMAIL_HOST_PASSWORD")
EMAIL_USE_TLS = env.bool("EMAIL_USE_TLS", True)
EMAIL_USE_SSL = env.bool("EMAIL_USE_SSL", False)
EMAIL_SENDER_NAME = env("EMAIL_SENDER_NAME", "admin")
EMAIL_SENDER_DOMAIN = env("EMAIL_SENDER_DOMAIN", DOMAIN)
EMAIL_SENDER = f"{EMAIL_SENDER_NAME}@{EMAIL_SENDER_DOMAIN}"

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR: AnyStr = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOCALE_PATHS = [
    os.path.join(BASE_DIR, "locale"),
]
LANGUAGE_COOKIE_NAME = env.str("LANGUAGE_COOKIE_NAME", "django_language")

STATIC_ROOT = os.path.join(BASE_DIR, env("STATIC_ROOT", "static"))
MEDIA_ROOT = os.path.join(BASE_DIR, env("MEDIA_ROOT", "images"))

DEFAULT_AUTO_FIELD = "django.db.models.AutoField"

# Preview image
ENABLE_PREVIEW_IMAGES = env.bool("ENABLE_PREVIEW_IMAGES", False)
PREVIEW_BG_COLOR = env.str("PREVIEW_BG_COLOR", "use_dominant_color_light")
PREVIEW_TEXT_COLOR = env.str("PREVIEW_TEXT_COLOR", "#363636")
PREVIEW_IMG_WIDTH = env.int("PREVIEW_IMG_WIDTH", 1200)
PREVIEW_IMG_HEIGHT = env.int("PREVIEW_IMG_HEIGHT", 630)
PREVIEW_DEFAULT_COVER_COLOR = env.str("PREVIEW_DEFAULT_COVER_COLOR", "#002549")
PREVIEW_DEFAULT_FONT = env.str("PREVIEW_DEFAULT_FONT", "Source Han Sans")

FONTS = {
    "Source Han Sans": {
        "directory": "source_han_sans",
        "filename": "SourceHanSans-VF.ttf.ttc",
        "url": "https://github.com/adobe-fonts/source-han-sans/raw/release/Variable/OTC/SourceHanSans-VF.ttf.ttc",
    }
}
FONT_DIR = os.path.join(STATIC_ROOT, "fonts")

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", True)
USE_HTTPS = env.bool("USE_HTTPS", not DEBUG)

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env("SECRET_KEY")
if not DEBUG and SECRET_KEY == "7(2w1sedok=aznpq)ta1mc4i%4h=xx@hxwx*o57ctsuml0x%fr":
    raise ImproperlyConfigured("You must change the SECRET_KEY env variable")

ALLOWED_HOSTS = env.list("ALLOWED_HOSTS", ["*"])

# Application definition

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.humanize",
    "oauth2_provider",
    "file_resubmit",
    "sass_processor",
    "bookwyrm",
    "celery",
    "django_celery_beat",
    "imagekit",
    "pgtrigger",
    "storages",
]

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.locale.LocaleMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "csp.middleware.CSPMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "bookwyrm.middleware.TimezoneMiddleware",
    "bookwyrm.middleware.IPBlocklistMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
    "bookwyrm.middleware.FileTooBig",
]

ROOT_URLCONF = "bookwyrm.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": ["templates"],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
                "bookwyrm.context_processors.site_settings",
            ],
        },
    },
]

LOG_LEVEL = env("LOG_LEVEL", "INFO").upper()
# Override aspects of the default handler to our taste
# See https://docs.djangoproject.com/en/3.2/topics/logging/#default-logging-configuration
# for a reference to the defaults we're overriding
#
# It seems that in order to override anything you have to include its
# entire dependency tree (handlers and filters) which makes this a
# bit verbose
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "filters": {
        # These are copied from the default configuration, required for
        # implementing mail_admins below
        "require_debug_false": {
            "()": "django.utils.log.RequireDebugFalse",
        },
        "require_debug_true": {
            "()": "django.utils.log.RequireDebugTrue",
        },
        "ignore_missing_variable": {
            "()": "bookwyrm.utils.log.IgnoreVariableDoesNotExist",
        },
    },
    "handlers": {
        # Overrides the default handler to make it log to console
        # regardless of the DEBUG setting (default is to not log to
        # console if DEBUG=False)
        "console": {
            "level": LOG_LEVEL,
            "filters": ["ignore_missing_variable"],
            "class": "logging.StreamHandler",
        },
        # This is copied as-is from the default logger, and is
        # required for the django section below
        "mail_admins": {
            "level": "ERROR",
            "filters": ["require_debug_false"],
            "class": "django.utils.log.AdminEmailHandler",
        },
    },
    "loggers": {
        # Install our new console handler for Django's logger, and
        # override the log level while we're at it
        "django": {
            "handlers": ["console", "mail_admins"],
            "level": LOG_LEVEL,
        },
        "django.utils.autoreload": {
            "level": "INFO",
        },
        # Add a bookwyrm-specific logger
        "bookwyrm": {
            "handlers": ["console"],
            "level": LOG_LEVEL,
        },
    },
}

STATICFILES_FINDERS = [
    "django.contrib.staticfiles.finders.FileSystemFinder",
    "django.contrib.staticfiles.finders.AppDirectoriesFinder",
    "sass_processor.finders.CssFinder",
]

SASS_PROCESSOR_INCLUDE_FILE_PATTERN = r"^.+\.[s]{0,1}(?:a|c)ss$"
# when debug is disabled, make sure to compile themes once with `./bw-dev compile_themes`
SASS_PROCESSOR_ENABLED = DEBUG

# minify css in production but not dev
if not DEBUG:
    SASS_OUTPUT_STYLE = "compressed"

WSGI_APPLICATION = "bookwyrm.wsgi.application"

# redis/activity streams settings
REDIS_ACTIVITY_HOST = env("REDIS_ACTIVITY_HOST", "localhost")
REDIS_ACTIVITY_PORT = env.int("REDIS_ACTIVITY_PORT", 6379)
REDIS_ACTIVITY_PASSWORD = requests.utils.quote(env("REDIS_ACTIVITY_PASSWORD", ""))
REDIS_ACTIVITY_DB_INDEX = env.int("REDIS_ACTIVITY_DB_INDEX", 0)
REDIS_ACTIVITY_URL = env(
    "REDIS_ACTIVITY_URL",
    f"redis://:{REDIS_ACTIVITY_PASSWORD}@{REDIS_ACTIVITY_HOST}:{REDIS_ACTIVITY_PORT}/{REDIS_ACTIVITY_DB_INDEX}",
)
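With the defaults above (empty password, localhost, port 6379, database 0), the composed fallback URL comes out as:

    redis://:@localhost:6379/0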
MAX_STREAM_LENGTH = env.int("MAX_STREAM_LENGTH", 200)

STREAMS = [
    {"key": "home", "name": _("Home Timeline"), "shortname": _("Home")},
    {"key": "books", "name": _("Books Timeline"), "shortname": _("Books")},
]

# Search configuration
# total time in seconds that the instance will spend searching connectors
SEARCH_TIMEOUT = env.int("SEARCH_TIMEOUT", 8)
# timeout for a query to an individual connector
QUERY_TIMEOUT = env.int("INTERACTIVE_QUERY_TIMEOUT", env.int("QUERY_TIMEOUT", 5))

# Redis cache backend
if env.bool("USE_DUMMY_CACHE", False):
    CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.dummy.DummyCache",
        },
        "file_resubmit": {
            "BACKEND": "django.core.cache.backends.dummy.DummyCache",
            "LOCATION": "/tmp/file_resubmit_tests/",
        },
    }
else:
    CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.redis.RedisCache",
            "LOCATION": REDIS_ACTIVITY_URL,
        },
        "file_resubmit": {
            "BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
            "LOCATION": "/tmp/file_resubmit/",
        },
    }

    SESSION_ENGINE = "django.contrib.sessions.backends.cache"

@@ -253,14 +273,14 @@ else:
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": env("POSTGRES_DB", "bookwyrm"),
        "USER": env("POSTGRES_USER", "bookwyrm"),
        "PASSWORD": env("POSTGRES_PASSWORD", "bookwyrm"),
        "HOST": env("POSTGRES_HOST", ""),
        "PORT": env.int("PGPORT", 5432),
    },
}

@@ -271,45 +291,53 @@ AUTH_USER_MODEL = "bookwyrm.User"
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = env("LANGUAGE_CODE", "en-us")
LANGUAGES = [
    ("en-us", _("English")),
    ("ca-es", _("Català (Catalan)")),
    ("de-de", _("Deutsch (German)")),
    ("eo-uy", _("Esperanto (Esperanto)")),
    ("es-es", _("Español (Spanish)")),
    ("eu-es", _("Euskara (Basque)")),
    ("gl-es", _("Galego (Galician)")),
    ("it-it", _("Italiano (Italian)")),
    ("ko-kr", _("한국어 (Korean)")),
    ("fi-fi", _("Suomi (Finnish)")),
    ("fr-fr", _("Français (French)")),
    ("lt-lt", _("Lietuvių (Lithuanian)")),
    ("nl-nl", _("Nederlands (Dutch)")),
    ("no-no", _("Norsk (Norwegian)")),
    ("pl-pl", _("Polski (Polish)")),
    ("pt-br", _("Português do Brasil (Brazilian Portuguese)")),
    ("pt-pt", _("Português Europeu (European Portuguese)")),
    ("ro-ro", _("Română (Romanian)")),
    ("sv-se", _("Svenska (Swedish)")),
    ("uk-ua", _("Українська (Ukrainian)")),
    ("zh-hans", _("简体中文 (Simplified Chinese)")),
    ("zh-hant", _("繁體中文 (Traditional Chinese)")),
]

LANGUAGE_ARTICLES = {
    "English": {"the", "a", "an"},
    "Español (Spanish)": {"un", "una", "unos", "unas", "el", "la", "los", "las"},
}

TIME_ZONE = "UTC"

@@ -319,78 +347,78 @@ USE_I18N = True
USE_TZ = True

# Imagekit generated thumbnails
ENABLE_THUMBNAIL_GENERATION = env.bool("ENABLE_THUMBNAIL_GENERATION", False)
IMAGEKIT_CACHEFILE_DIR = "thumbnails"
IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY = "bookwyrm.thumbnail_generation.Strategy"

PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
CSP_ADDITIONAL_HOSTS = env.list("CSP_ADDITIONAL_HOSTS", [])

PROTOCOL = "http"
if USE_HTTPS:
    PROTOCOL = "https"
    SESSION_COOKIE_SECURE = True
    CSRF_COOKIE_SECURE = True

PORT = env.int("PORT", 443 if USE_HTTPS else 80)
if (USE_HTTPS and PORT == 443) or (not USE_HTTPS and PORT == 80):
    NETLOC = DOMAIN
else:
    NETLOC = f"{DOMAIN}:{PORT}"
BASE_URL = f"{PROTOCOL}://{NETLOC}"
CSRF_TRUSTED_ORIGINS = [BASE_URL]

USER_AGENT = f"BookWyrm (BookWyrm/{VERSION}; +{BASE_URL})"
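A worked example of the netloc logic above, assuming DOMAIN=example.net: with USE_HTTPS on and the default PORT of 443, the port is omitted and BASE_URL is "https://example.net"; with PORT=8443 it would be "https://example.net:8443".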
# Storage

USE_S3 = env.bool("USE_S3", False)
USE_AZURE = env.bool("USE_AZURE", False)
S3_SIGNED_URL_EXPIRY = env.int("S3_SIGNED_URL_EXPIRY", 900)

if USE_S3:
    # AWS settings
    AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID")
    AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY")
    AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
    AWS_S3_CUSTOM_DOMAIN = env("AWS_S3_CUSTOM_DOMAIN", None)
    AWS_S3_REGION_NAME = env("AWS_S3_REGION_NAME", "")
    AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", None)
    AWS_DEFAULT_ACL = "public-read"
    AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
    AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", f"{PROTOCOL}:")
    # Storages
    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "location": "images",
                "default_acl": "public-read",
                "file_overwrite": False,
            },
        },
        "staticfiles": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "location": "static",
                "default_acl": "public-read",
            },
        },
        "sass_processor": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "location": "static",
                "default_acl": "public-read",
            },
        },
        "exports": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "location": "images",
                "default_acl": None,
                "file_overwrite": False,
            },
        },
    }
    # S3 Static settings
    STATIC_LOCATION = "static"

@@ -402,62 +430,72 @@ if USE_S3:
    MEDIA_FULL_URL = MEDIA_URL
    # Content Security Policy
    CSP_DEFAULT_SRC = [
        "'self'",
        f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
        if AWS_S3_CUSTOM_DOMAIN
        else None,
    ] + CSP_ADDITIONAL_HOSTS
    CSP_SCRIPT_SRC = [
        "'self'",
        f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
        if AWS_S3_CUSTOM_DOMAIN
        else None,
    ] + CSP_ADDITIONAL_HOSTS
elif USE_AZURE:
    # Azure settings
    AZURE_ACCOUNT_NAME = env("AZURE_ACCOUNT_NAME")
    AZURE_ACCOUNT_KEY = env("AZURE_ACCOUNT_KEY")
    AZURE_CONTAINER = env("AZURE_CONTAINER")
    AZURE_CUSTOM_DOMAIN = env("AZURE_CUSTOM_DOMAIN")
    # Storages
    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.azure_storage.AzureStorage",
            "OPTIONS": {
                "location": "images",
                "overwrite_files": False,
            },
        },
        "staticfiles": {
            "BACKEND": "storages.backends.azure_storage.AzureStorage",
            "OPTIONS": {
                "location": "static",
            },
        },
        "exports": {
            "BACKEND": None,  # not implemented yet
        },
    }
    # Azure Static settings
    STATIC_LOCATION = "static"
    STATIC_URL = (
        f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{STATIC_LOCATION}/"
    )
    STATIC_FULL_URL = STATIC_URL
    # Azure Media settings
    MEDIA_LOCATION = "images"
    MEDIA_URL = (
        f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{MEDIA_LOCATION}/"
    )
    MEDIA_FULL_URL = MEDIA_URL
    # Content Security Policy
    CSP_DEFAULT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
    CSP_SCRIPT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
else:
    # Storages
    STORAGES = {
        "default": {
            "BACKEND": "django.core.files.storage.FileSystemStorage",
        },
        "staticfiles": {
            "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
        },
        "exports": {
            "BACKEND": "django.core.files.storage.FileSystemStorage",
            "OPTIONS": {
                "location": "exports",
            },
        },
    }
    # Static settings
    STATIC_URL = "/static/"

@@ -466,21 +504,29 @@ else:
    MEDIA_URL = "/images/"
    MEDIA_FULL_URL = BASE_URL + MEDIA_URL
    # Content Security Policy
    CSP_DEFAULT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS
    CSP_SCRIPT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS

CSP_INCLUDE_NONCE_IN = ["script-src"]

OTEL_EXPORTER_OTLP_ENDPOINT = env("OTEL_EXPORTER_OTLP_ENDPOINT", None)
OTEL_EXPORTER_OTLP_HEADERS = env("OTEL_EXPORTER_OTLP_HEADERS", None)
OTEL_SERVICE_NAME = env("OTEL_SERVICE_NAME", None)
OTEL_EXPORTER_CONSOLE = env.bool("OTEL_EXPORTER_CONSOLE", False)

TWO_FACTOR_LOGIN_MAX_SECONDS = env.int("TWO_FACTOR_LOGIN_MAX_SECONDS", 60)
TWO_FACTOR_LOGIN_VALIDITY_WINDOW = env.int("TWO_FACTOR_LOGIN_VALIDITY_WINDOW", 2)

HTTP_X_FORWARDED_PROTO = env.bool("SECURE_PROXY_SSL_HEADER", False)
if HTTP_X_FORWARDED_PROTO:
    SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")

# Instance Actor for signing GET requests to "secure mode"
# Mastodon servers.
# Do not change this setting unless you already have an existing
# user with the same username - in which case you should change it!
INSTANCE_ACTOR_USERNAME = "bookwyrm.instance.actor"

# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env
# (note the difference in variable names).
DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) << 20
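The shift converts mebibytes to bytes: with the default of 100, 100 << 20 == 100 * 2**20 == 104857600 bytes (100 MiB).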