
Merge branch 'main' into csv

This commit is contained in:
Hugh Rundle 2024-06-29 16:03:15 +10:00
commit e5b260e3ee
No known key found for this signature in database
GPG key ID: A7E35779918253F9
400 changed files with 39801 additions and 9079 deletions

View file

@ -20,6 +20,7 @@ from bookwyrm.tasks import app, MISC
logger = logging.getLogger(__name__)
# pylint: disable=invalid-name
TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel)
@ -236,7 +237,7 @@ class ActivityObject:
omit = kwargs.get("omit", ())
data = self.__dict__.copy()
# recursively serialize
for (k, v) in data.items():
for k, v in data.items():
try:
if issubclass(type(v), ActivityObject):
data[k] = v.serialize()
@ -249,7 +250,10 @@ class ActivityObject:
pass
data = {k: v for (k, v) in data.items() if v is not None and k not in omit}
if "@context" not in omit:
data["@context"] = "https://www.w3.org/ns/activitystreams"
data["@context"] = [
"https://www.w3.org/ns/activitystreams",
{"Hashtag": "as:Hashtag"},
]
return data
@ -397,18 +401,14 @@ def resolve_remote_id(
def get_representative():
"""Get or create an actor representing the instance
to sign outgoing HTTP GET requests"""
username = f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}"
email = "bookwyrm@localhost"
try:
user = models.User.objects.get(username=username)
except models.User.DoesNotExist:
user = models.User.objects.create_user(
username=username,
email=email,
local=True,
localname=INSTANCE_ACTOR_USERNAME,
)
return user
return models.User.objects.get_or_create(
username=f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}",
defaults={
"email": "bookwyrm@localhost",
"local": True,
"localname": INSTANCE_ACTOR_USERNAME,
},
)[0]
def get_activitypub_data(url):
@ -427,6 +427,7 @@ def get_activitypub_data(url):
"Date": now,
"Signature": make_signature("get", sender, url, now),
},
timeout=15,
)
except requests.RequestException:
raise ConnectorException()
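For reference, Django's get_or_create() returns an (object, created) tuple, which is why the rewritten get_representative() above takes [0]. A minimal illustrative sketch of the same pattern, assuming INSTANCE_ACTOR_USERNAME and DOMAIN are imported from bookwyrm.settings as in the surrounding module:

from bookwyrm import models
from bookwyrm.settings import DOMAIN, INSTANCE_ACTOR_USERNAME

# get_or_create looks the user up by the unique kwargs and only applies
# "defaults" when it has to create a new row
user, created = models.User.objects.get_or_create(
    username=f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}",
    defaults={
        "email": "bookwyrm@localhost",
        "local": True,
        "localname": INSTANCE_ACTOR_USERNAME,
    },
)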

View file

@ -1,5 +1,5 @@
""" actor serializer """
from dataclasses import dataclass, field
from dataclasses import dataclass
from typing import Dict
from .base_activity import ActivityObject
@ -35,7 +35,7 @@ class Person(ActivityObject):
endpoints: Dict = None
name: str = None
summary: str = None
icon: Image = field(default_factory=lambda: {})
icon: Image = None
bookwyrmUser: bool = False
manuallyApprovesFollowers: str = False
discoverable: str = False

View file

@ -171,9 +171,19 @@ class Reject(Verb):
type: str = "Reject"
def action(self, allow_external_connections=True):
"""reject a follow request"""
obj = self.object.to_model(save=False, allow_create=False)
obj.reject()
"""reject a follow or follow request"""
for model_name in ["UserFollowRequest", "UserFollows", None]:
model = apps.get_model(f"bookwyrm.{model_name}") if model_name else None
if obj := self.object.to_model(
model=model,
save=False,
allow_create=False,
allow_external_connections=allow_external_connections,
):
# Reject the first model that can be built.
obj.reject()
break
@dataclass(init=False)

View file

@ -139,14 +139,14 @@ class ActivityStream(RedisStore):
| (
Q(following=status.user) & Q(following=status.reply_parent.user)
) # if the user is following both authors
).distinct()
)
# only visible to the poster's followers and tagged users
elif status.privacy == "followers":
audience = audience.filter(
Q(following=status.user) # if the user is following the author
)
return audience.distinct()
return audience.distinct("id")
@tracer.start_as_current_span("ActivityStream.get_audience")
def get_audience(self, status):
@ -156,7 +156,7 @@ class ActivityStream(RedisStore):
status_author = models.User.objects.filter(
is_active=True, local=True, id=status.user.id
).values_list("id", flat=True)
return list(set(list(audience) + list(status_author)))
return list(set(audience) | set(status_author))
def get_stores_for_users(self, user_ids):
"""convert a list of user ids into redis store ids"""
@ -183,15 +183,13 @@ class HomeStream(ActivityStream):
def get_audience(self, status):
trace.get_current_span().set_attribute("stream_id", self.key)
audience = super()._get_audience(status)
if not audience:
return []
# if the user is following the author
audience = audience.filter(following=status.user).values_list("id", flat=True)
# if the user is the post's author
status_author = models.User.objects.filter(
is_active=True, local=True, id=status.user.id
).values_list("id", flat=True)
return list(set(list(audience) + list(status_author)))
return list(set(audience) | set(status_author))
def get_statuses_for_user(self, user):
return models.Status.privacy_filter(
@ -239,9 +237,7 @@ class BooksStream(ActivityStream):
)
audience = super()._get_audience(status)
if not audience:
return models.User.objects.none()
return audience.filter(shelfbook__book__parent_work=work).distinct()
return audience.filter(shelfbook__book__parent_work=work)
def get_audience(self, status):
# only show public statuses on the books feed,

View file

@ -1,4 +1,5 @@
"""Do further startup configuration and initialization"""
import os
import urllib
import logging
@ -14,16 +15,16 @@ def download_file(url, destination):
"""Downloads a file to the given path"""
try:
# Ensure our destination directory exists
os.makedirs(os.path.dirname(destination))
os.makedirs(os.path.dirname(destination), exist_ok=True)
with urllib.request.urlopen(url) as stream:
with open(destination, "b+w") as outfile:
outfile.write(stream.read())
except (urllib.error.HTTPError, urllib.error.URLError):
logger.info("Failed to download file %s", url)
except OSError:
logger.info("Couldn't open font file %s for writing", destination)
except: # pylint: disable=bare-except
logger.info("Unknown error in file download")
except (urllib.error.HTTPError, urllib.error.URLError) as err:
logger.error("Failed to download file %s: %s", url, err)
except OSError as err:
logger.error("Couldn't open font file %s for writing: %s", destination, err)
except Exception as err: # pylint:disable=broad-except
logger.error("Unknown error in file download: %s", err)
class BookwyrmConfig(AppConfig):

View file

@ -43,6 +43,7 @@ def search(
min_confidence: float = 0,
filters: Optional[list[Any]] = None,
return_first: bool = False,
books: Optional[QuerySet[models.Edition]] = None,
) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
"""search your local database"""
filters = filters or []
@ -54,13 +55,15 @@ def search(
# first, try searching unique identifiers
# unique identifiers never have spaces, title/author usually do
if not " " in query:
results = search_identifiers(query, *filters, return_first=return_first)
results = search_identifiers(
query, *filters, return_first=return_first, books=books
)
# if there were no identifier results...
if not results:
# then try searching title/author
results = search_title_author(
query, min_confidence, *filters, return_first=return_first
query, min_confidence, *filters, return_first=return_first, books=books
)
return results
@ -98,9 +101,17 @@ def format_search_result(search_result):
def search_identifiers(
query, *filters, return_first=False
query,
*filters,
return_first=False,
books=None,
) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
"""tries remote_id, isbn; defined as dedupe fields on the model"""
"""search Editions by deduplication fields
Best for cases when we can assume someone is searching for an exact match on
commonly unique data identifiers like isbn or specific library ids.
"""
books = books or models.Edition.objects
if connectors.maybe_isbn(query):
# Oh did you think the 'S' in ISBN stood for 'standard'?
normalized_isbn = query.strip().upper().rjust(10, "0")
@ -111,7 +122,7 @@ def search_identifiers(
for f in models.Edition._meta.get_fields()
if hasattr(f, "deduplication_field") and f.deduplication_field
]
results = models.Edition.objects.filter(
results = books.filter(
*filters, reduce(operator.or_, (Q(**f) for f in or_filters))
).distinct()
@ -121,12 +132,17 @@ def search_identifiers(
def search_title_author(
query, min_confidence, *filters, return_first=False
query,
min_confidence,
*filters,
return_first=False,
books=None,
) -> QuerySet[models.Edition]:
"""searches for title and author"""
books = books or models.Edition.objects
query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
results = (
models.Edition.objects.filter(*filters, search_vector=query)
books.filter(*filters, search_vector=query)
.annotate(rank=SearchRank(F("search_vector"), query))
.filter(rank__gt=min_confidence)
.order_by("-rank")
@ -137,7 +153,7 @@ def search_title_author(
# filter out multiple editions of the same work
list_results = []
for work_id in set(editions_of_work[:30]):
for work_id in editions_of_work[:30]:
result = (
results.filter(parent_work=work_id)
.order_by("-rank", "-edition_rank")
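A hedged usage sketch (not part of this diff) of the new books parameter, assuming the module is bookwyrm.book_search: callers can restrict both identifier and title/author search to a pre-filtered Edition queryset.

from bookwyrm import book_search, models

# hypothetical pre-filter: only editions that are attached to a work
candidates = models.Edition.objects.filter(parent_work__isnull=False)
matches = book_search.search("9780141439563", books=candidates)
first_match = book_search.search("9780141439563", books=candidates, return_first=True)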

View file

@ -3,7 +3,9 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Optional, TypedDict, Any, Callable, Union, Iterator
from urllib.parse import quote_plus
import imghdr
# pylint: disable-next=deprecated-module
import imghdr # Deprecated in 3.11 for removal in 3.13; no good alternative yet
import logging
import re
import asyncio

View file

@ -118,9 +118,11 @@ def get_connectors() -> Iterator[abstract_connector.AbstractConnector]:
def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnector:
"""get the connector related to the object's server"""
url = urlparse(remote_id)
identifier = url.netloc
identifier = url.hostname
if not identifier:
raise ValueError("Invalid remote id")
raise ValueError(f"Invalid remote id: {remote_id}")
base_url = f"{url.scheme}://{url.netloc}"
try:
connector_info = models.Connector.objects.get(identifier=identifier)
@ -128,10 +130,10 @@ def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnec
connector_info = models.Connector.objects.create(
identifier=identifier,
connector_file="bookwyrm_connector",
base_url=f"https://{identifier}",
books_url=f"https://{identifier}/book",
covers_url=f"https://{identifier}/images/covers",
search_url=f"https://{identifier}/search?q=",
base_url=base_url,
books_url=f"{base_url}/book",
covers_url=f"{base_url}/images/covers",
search_url=f"{base_url}/search?q=",
priority=2,
)
@ -143,7 +145,9 @@ def load_more_data(connector_id: str, book_id: str) -> None:
"""background the work of getting all 10,000 editions of LoTR"""
connector_info = models.Connector.objects.get(id=connector_id)
connector = load_connector(connector_info)
book = models.Book.objects.select_subclasses().get(id=book_id)
book = models.Book.objects.select_subclasses().get( # type: ignore[no-untyped-call]
id=book_id
)
connector.expand_book_data(book)
@ -154,7 +158,9 @@ def create_edition_task(
"""separate task for each of the 10,000 editions of LoTR"""
connector_info = models.Connector.objects.get(id=connector_id)
connector = load_connector(connector_info)
work = models.Work.objects.select_subclasses().get(id=work_id)
work = models.Work.objects.select_subclasses().get( # type: ignore[no-untyped-call]
id=work_id
)
connector.create_edition_from_data(work, data)
@ -188,8 +194,11 @@ def raise_not_valid_url(url: str) -> None:
if not parsed.scheme in ["http", "https"]:
raise ConnectorException("Invalid scheme: ", url)
if not parsed.hostname:
raise ConnectorException("Hostname missing: ", url)
try:
ipaddress.ip_address(parsed.netloc)
ipaddress.ip_address(parsed.hostname)
raise ConnectorException("Provided url is an IP address: ", url)
except ValueError:
# it's not an IP address, which is good
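An illustrative snippet (not in the diff) of why urlparse(...).hostname is used instead of .netloc here: hostname drops credentials and the port and lowercases the host, which is what both the connector identifier and the IP-address check want.

from urllib.parse import urlparse

parsed = urlparse("https://user:pass@Example.Social:8443/book/1")
print(parsed.netloc)    # 'user:pass@Example.Social:8443'
print(parsed.hostname)  # 'example.social' (lowercased, no userinfo or port)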

View file

@ -229,7 +229,7 @@ class Connector(AbstractConnector):
data = get_data(url)
except ConnectorException:
return ""
return data.get("extract", "")
return str(data.get("extract", ""))
def get_remote_id_from_model(self, obj: models.BookDataModel) -> str:
"""use get_remote_id to figure out the link from a model obj"""

View file

@ -4,7 +4,7 @@ from django.template.loader import get_template
from bookwyrm import models, settings
from bookwyrm.tasks import app, EMAIL
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import DOMAIN, BASE_URL
def email_data():
@ -14,6 +14,7 @@ def email_data():
"site_name": site.name,
"logo": site.logo_small_url,
"domain": DOMAIN,
"base_url": BASE_URL,
"user": None,
}

View file

@ -15,6 +15,7 @@ class AuthorForm(CustomForm):
"aliases",
"bio",
"wikipedia_link",
"wikidata",
"website",
"born",
"died",
@ -32,6 +33,7 @@ class AuthorForm(CustomForm):
"wikipedia_link": forms.TextInput(
attrs={"aria-describedby": "desc_wikipedia_link"}
),
"wikidata": forms.TextInput(attrs={"aria-describedby": "desc_wikidata"}),
"website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
"born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
"died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),

View file

@ -1,8 +1,9 @@
""" using django model forms """
from django import forms
from file_resubmit.widgets import ResubmitImageWidget
from bookwyrm import models
from bookwyrm.models.fields import ClearableFileInputWithWarning
from .custom_form import CustomForm
from .widgets import ArrayWidget, SelectDateWidget, Select
@ -70,9 +71,7 @@ class EditionForm(CustomForm):
"published_date": SelectDateWidget(
attrs={"aria-describedby": "desc_published_date"}
),
"cover": ClearableFileInputWithWarning(
attrs={"aria-describedby": "desc_cover"}
),
"cover": ResubmitImageWidget(attrs={"aria-describedby": "desc_cover"}),
"physical_format": Select(
attrs={"aria-describedby": "desc_physical_format"}
),

View file

@ -25,6 +25,10 @@ class ImportForm(forms.Form):
csv_file = forms.FileField()
class ImportUserForm(forms.Form):
archive_file = forms.FileField()
class ShelfForm(CustomForm):
class Meta:
model = models.Shelf

View file

@ -1,4 +1,5 @@
""" using django model forms """
from urllib.parse import urlparse
from django.utils.translation import gettext_lazy as _
@ -25,7 +26,7 @@ class FileLinkForm(CustomForm):
url = cleaned_data.get("url")
filetype = cleaned_data.get("filetype")
book = cleaned_data.get("book")
domain = urlparse(url).netloc
domain = urlparse(url).hostname
if models.LinkDomain.objects.filter(domain=domain).exists():
status = models.LinkDomain.objects.get(domain=domain).status
if status == "blocked":
@ -37,10 +38,9 @@ class FileLinkForm(CustomForm):
),
)
if (
not self.instance
and models.FileLink.objects.filter(
url=url, book=book, filetype=filetype
).exists()
models.FileLink.objects.filter(url=url, book=book, filetype=filetype)
.exclude(pk=self.instance)
.exists()
):
# pylint: disable=line-too-long
self.add_error(

View file

@ -1,7 +1,7 @@
""" import classes """
from .importer import Importer
from .bookwyrm_import import BookwyrmBooksImporter
from .bookwyrm_import import BookwyrmImporter
from .calibre_import import CalibreImporter
from .goodreads_import import GoodreadsImporter
from .librarything_import import LibrarythingImporter

View file

@ -1,14 +1,24 @@
""" handle reading a csv from BookWyrm """
"""Import data from Bookwyrm export files"""
from django.http import QueryDict
from typing import Any
from . import Importer
from bookwyrm.models import User
from bookwyrm.models.bookwyrm_import_job import BookwyrmImportJob
class BookwyrmBooksImporter(Importer):
"""Goodreads is the default importer, we basically just use the same structure"""
class BookwyrmImporter:
"""Import a Bookwyrm User export file.
This is kind of a combination of an importer and a connector.
"""
service = "BookWyrm"
# pylint: disable=no-self-use
def process_import(
self, user: User, archive_file: bytes, settings: QueryDict
) -> BookwyrmImportJob:
"""import user data from a Bookwyrm export file"""
def __init__(self, *args: Any, **kwargs: Any):
self.row_mappings_guesses.append(("shelf_name", ["shelf_name"]))
super().__init__(*args, **kwargs)
required = [k for k in settings if settings.get(k) == "on"]
job = BookwyrmImportJob.objects.create(
user=user, archive_file=archive_file, required=required
)
return job

View file

@ -14,15 +14,10 @@ class CalibreImporter(Importer):
def __init__(self, *args: Any, **kwargs: Any):
# Add timestamp to row_mappings_guesses for date_added to avoid
# integrity error
row_mappings_guesses = []
for field, mapping in self.row_mappings_guesses:
if field in ("date_added",):
row_mappings_guesses.append((field, mapping + ["timestamp"]))
else:
row_mappings_guesses.append((field, mapping))
self.row_mappings_guesses = row_mappings_guesses
self.row_mappings_guesses = [
(field, mapping + (["timestamp"] if field == "date_added" else []))
for field, mapping in self.row_mappings_guesses
]
super().__init__(*args, **kwargs)
def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:

View file

@ -26,7 +26,7 @@ class IsbnHyphenator:
def update_range_message(self) -> None:
"""Download the range message xml file and save it locally"""
response = requests.get(self.__range_message_url)
response = requests.get(self.__range_message_url, timeout=15)
with open(self.__range_file_path, "w", encoding="utf-8") as file:
file.write(response.text)
self.__element_tree = None

View file

@ -1,13 +1,14 @@
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
merge book data objects """
from django.core.management.base import BaseCommand
from django.db.models import Count
from bookwyrm import models
from bookwyrm.management.merge import merge_objects
def dedupe_model(model):
def dedupe_model(model, dry_run=False):
"""combine duplicate editions and update related models"""
print(f"deduplicating {model.__name__}:")
fields = model._meta.get_fields()
dedupe_fields = [
f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field
@ -16,30 +17,42 @@ def dedupe_model(model):
dupes = (
model.objects.values(field.name)
.annotate(Count(field.name))
.filter(**{"%s__count__gt" % field.name: 1})
.filter(**{f"{field.name}__count__gt": 1})
.exclude(**{field.name: ""})
.exclude(**{f"{field.name}__isnull": True})
)
for dupe in dupes:
value = dupe[field.name]
if not value or value == "":
continue
print("----------")
print(dupe)
objs = model.objects.filter(**{field.name: value}).order_by("id")
canonical = objs.first()
print("keeping", canonical.remote_id)
action = "would merge" if dry_run else "merging"
print(
f"{action} into {model.__name__} {canonical.remote_id} based on {field.name} {value}:"
)
for obj in objs[1:]:
print(obj.remote_id)
merge_objects(canonical, obj)
print(f"- {obj.remote_id}")
absorbed_fields = obj.merge_into(canonical, dry_run=dry_run)
print(f" absorbed fields: {absorbed_fields}")
class Command(BaseCommand):
"""deduplicate allllll the book data models"""
help = "merges duplicate book data"
def add_arguments(self, parser):
"""add the arguments for this command"""
parser.add_argument(
"--dry_run",
action="store_true",
help="don't actually merge, only print what would happen",
)
# pylint: disable=no-self-use,unused-argument
def handle(self, *args, **options):
"""run deduplications"""
dedupe_model(models.Edition)
dedupe_model(models.Work)
dedupe_model(models.Author)
dedupe_model(models.Edition, dry_run=options["dry_run"])
dedupe_model(models.Work, dry_run=options["dry_run"])
dedupe_model(models.Author, dry_run=options["dry_run"])
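A usage sketch for the new flag; the command name below is an assumption, since this excerpt doesn't show the file path of the management command.

from django.core.management import call_command

# "deduplicate_book_data" is an assumed command name, not confirmed by this diff
call_command("deduplicate_book_data", dry_run=True)  # report what would merge
call_command("deduplicate_book_data")                # actually merge duplicates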

View file

@ -0,0 +1,43 @@
""" Erase any data stored about deleted users """
import sys
from django.core.management.base import BaseCommand, CommandError
from bookwyrm import models
from bookwyrm.models.user import erase_user_data
# pylint: disable=missing-function-docstring
class Command(BaseCommand):
"""command-line options"""
help = "Remove Two Factor Authorisation from user"
def add_arguments(self, parser): # pylint: disable=no-self-use
parser.add_argument(
"--dryrun",
action="store_true",
help="Preview users to be cleared without altering the database",
)
def handle(self, *args, **options): # pylint: disable=unused-argument
# Check for anything fishy
bad_state = models.User.objects.filter(is_deleted=True, is_active=True)
if bad_state.exists():
raise CommandError(
f"{bad_state.count()} user(s) marked as both active and deleted"
)
deleted_users = models.User.objects.filter(is_deleted=True)
self.stdout.write(f"Found {deleted_users.count()} deleted users")
if options["dryrun"]:
self.stdout.write("\n".join(u.username for u in deleted_users[:5]))
if deleted_users.count() > 5:
self.stdout.write("... and more")
sys.exit()
self.stdout.write("Erasing user data:")
for user_id in deleted_users.values_list("id", flat=True):
erase_user_data.delay(user_id)
self.stdout.write(".", ending="")
self.stdout.write("")
self.stdout.write("Tasks created successfully")

View file

@ -1,54 +0,0 @@
""" Get your admin code to allow install """
from django.core.management.base import BaseCommand
from bookwyrm import models
from bookwyrm.settings import VERSION
# pylint: disable=no-self-use
class Command(BaseCommand):
"""command-line options"""
help = "What version is this?"
def add_arguments(self, parser):
"""specify which function to run"""
parser.add_argument(
"--current",
action="store_true",
help="Version stored in database",
)
parser.add_argument(
"--target",
action="store_true",
help="Version stored in settings",
)
parser.add_argument(
"--update",
action="store_true",
help="Update database version",
)
# pylint: disable=unused-argument
def handle(self, *args, **options):
"""execute init"""
site = models.SiteSettings.objects.get()
current = site.version or "0.0.1"
target = VERSION
if options.get("current"):
print(current)
return
if options.get("target"):
print(target)
return
if options.get("update"):
site.version = target
site.save()
return
if current != target:
print(f"{current}/{target}")
else:
print(current)

View file

@ -1,50 +0,0 @@
from django.db.models import ManyToManyField
def update_related(canonical, obj):
"""update all the models with fk to the object being removed"""
# move related models to canonical
related_models = [
(r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
]
for (related_field, related_model) in related_models:
# Skip the ManyToMany fields that arent auto-created. These
# should have a corresponding OneToMany field in the model for
# the linking table anyway. If we update it through that model
# instead then we wont lose the extra fields in the linking
# table.
related_field_obj = related_model._meta.get_field(related_field)
if isinstance(related_field_obj, ManyToManyField):
through = related_field_obj.remote_field.through
if not through._meta.auto_created:
continue
related_objs = related_model.objects.filter(**{related_field: obj})
for related_obj in related_objs:
print("replacing in", related_model.__name__, related_field, related_obj.id)
try:
setattr(related_obj, related_field, canonical)
related_obj.save()
except TypeError:
getattr(related_obj, related_field).add(canonical)
getattr(related_obj, related_field).remove(obj)
def copy_data(canonical, obj):
"""try to get the most data possible"""
for data_field in obj._meta.get_fields():
if not hasattr(data_field, "activitypub_field"):
continue
data_value = getattr(obj, data_field.name)
if not data_value:
continue
if not getattr(canonical, data_field.name):
print("setting data field", data_field.name, data_value)
setattr(canonical, data_field.name, data_value)
canonical.save()
def merge_objects(canonical, obj):
copy_data(canonical, obj)
update_related(canonical, obj)
# remove the outdated entry
obj.delete()

View file

@ -1,4 +1,3 @@
from bookwyrm.management.merge import merge_objects
from django.core.management.base import BaseCommand
@ -9,6 +8,11 @@ class MergeCommand(BaseCommand):
"""add the arguments for this command"""
parser.add_argument("--canonical", type=int, required=True)
parser.add_argument("--other", type=int, required=True)
parser.add_argument(
"--dry_run",
action="store_true",
help="don't actually merge, only print what would happen",
)
# pylint: disable=no-self-use,unused-argument
def handle(self, *args, **options):
@ -26,4 +30,8 @@ class MergeCommand(BaseCommand):
print("other book doesnt exist!")
return
merge_objects(canonical, other)
absorbed_fields = other.merge_into(canonical, dry_run=options["dry_run"])
action = "would be" if options["dry_run"] else "has been"
print(f"{other.remote_id} {action} merged into {canonical.remote_id}")
print(f"absorbed fields: {absorbed_fields}")

View file

@ -1,3 +1,4 @@
""" look at all this nice middleware! """
from .timezone_middleware import TimezoneMiddleware
from .ip_middleware import IPBlocklistMiddleware
from .file_too_big import FileTooBig

View file

@ -0,0 +1,30 @@
"""Middleware to display a custom 413 error page"""
from django.http import HttpResponse
from django.shortcuts import render
from django.core.exceptions import RequestDataTooBig
class FileTooBig:
"""Middleware to display a custom page when a
RequestDataTooBig exception is thrown"""
def __init__(self, get_response):
"""boilerplate __init__ from Django docs"""
self.get_response = get_response
def __call__(self, request):
"""If RequestDataTooBig is thrown, render the 413 error page"""
try:
body = request.body # pylint: disable=unused-variable
except RequestDataTooBig:
rendered = render(request, "413.html")
response = HttpResponse(rendered)
return response
response = self.get_response(request)
return response
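For the middleware to run it still needs to be listed in settings; a hedged sketch of that entry (the settings change itself isn't shown in this excerpt, and the dotted path is assumed from the package import above):

# bookwyrm/settings.py (assumed location of the MIDDLEWARE setting)
MIDDLEWARE = [
    # ... existing middleware ...
    "bookwyrm.middleware.FileTooBig",  # assumed dotted path, based on the __init__ import
]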

View file

@ -1,5 +1,5 @@
""" Makes the app aware of the users timezone """
import pytz
import zoneinfo
from django.utils import timezone
@ -12,9 +12,7 @@ class TimezoneMiddleware:
def __call__(self, request):
if request.user.is_authenticated:
timezone.activate(pytz.timezone(request.user.preferred_timezone))
timezone.activate(zoneinfo.ZoneInfo(request.user.preferred_timezone))
else:
timezone.activate(pytz.utc)
response = self.get_response(request)
timezone.deactivate()
return response
timezone.deactivate()
return self.get_response(request)
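A small sketch (not part of the diff) of the standard-library replacement for pytz, using Python 3.9+'s zoneinfo:

import zoneinfo
from django.utils import timezone

# ZoneInfo accepts the same IANA names pytz.timezone did, e.g. the value of
# a user's preferred_timezone field
timezone.activate(zoneinfo.ZoneInfo("Australia/Melbourne"))
timezone.deactivate()  # falls back to settings.TIME_ZONE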

View file

@ -10,6 +10,7 @@ class Migration(migrations.Migration):
]
operations = [
# The new timezones are "Factory" and "localtime"
migrations.AlterField(
model_name="user",
name="preferred_timezone",

View file

@ -22,17 +22,6 @@ def update_deleted_users(apps, schema_editor):
).update(is_deleted=True)
def erase_deleted_user_data(apps, schema_editor):
"""Retroactively clear user data"""
for user in User.objects.filter(is_deleted=True):
user.erase_user_data()
user.save(
broadcast=False,
update_fields=["email", "avatar", "preview_image", "summary", "name"],
)
user.erase_user_statuses(broadcast=False)
class Migration(migrations.Migration):
dependencies = [
@ -43,7 +32,4 @@ class Migration(migrations.Migration):
migrations.RunPython(
update_deleted_users, reverse_code=migrations.RunPython.noop
),
migrations.RunPython(
erase_deleted_user_data, reverse_code=migrations.RunPython.noop
),
]

View file

@ -0,0 +1,212 @@
# Generated by Django 3.2.20 on 2023-11-16 00:48
from django.conf import settings
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0185_alter_notification_notification_type"),
]
operations = [
migrations.CreateModel(
name="ParentJob",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("task_id", models.UUIDField(blank=True, null=True, unique=True)),
(
"created_date",
models.DateTimeField(default=django.utils.timezone.now),
),
(
"updated_date",
models.DateTimeField(default=django.utils.timezone.now),
),
("complete", models.BooleanField(default=False)),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("active", "Active"),
("complete", "Complete"),
("stopped", "Stopped"),
("failed", "Failed"),
],
default="pending",
max_length=50,
null=True,
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
migrations.AddField(
model_name="sitesettings",
name="user_import_time_limit",
field=models.IntegerField(default=48),
),
migrations.AlterField(
model_name="notification",
name="notification_type",
field=models.CharField(
choices=[
("FAVORITE", "Favorite"),
("BOOST", "Boost"),
("REPLY", "Reply"),
("MENTION", "Mention"),
("TAG", "Tag"),
("FOLLOW", "Follow"),
("FOLLOW_REQUEST", "Follow Request"),
("IMPORT", "Import"),
("USER_IMPORT", "User Import"),
("USER_EXPORT", "User Export"),
("ADD", "Add"),
("REPORT", "Report"),
("LINK_DOMAIN", "Link Domain"),
("INVITE", "Invite"),
("ACCEPT", "Accept"),
("JOIN", "Join"),
("LEAVE", "Leave"),
("REMOVE", "Remove"),
("GROUP_PRIVACY", "Group Privacy"),
("GROUP_NAME", "Group Name"),
("GROUP_DESCRIPTION", "Group Description"),
("MOVE", "Move"),
],
max_length=255,
),
),
migrations.CreateModel(
name="BookwyrmExportJob",
fields=[
(
"parentjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.parentjob",
),
),
("export_data", models.FileField(null=True, upload_to="")),
],
options={
"abstract": False,
},
bases=("bookwyrm.parentjob",),
),
migrations.CreateModel(
name="BookwyrmImportJob",
fields=[
(
"parentjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.parentjob",
),
),
("archive_file", models.FileField(blank=True, null=True, upload_to="")),
("import_data", models.JSONField(null=True)),
(
"required",
django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(blank=True, max_length=50),
blank=True,
size=None,
),
),
],
options={
"abstract": False,
},
bases=("bookwyrm.parentjob",),
),
migrations.CreateModel(
name="ChildJob",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("task_id", models.UUIDField(blank=True, null=True, unique=True)),
(
"created_date",
models.DateTimeField(default=django.utils.timezone.now),
),
(
"updated_date",
models.DateTimeField(default=django.utils.timezone.now),
),
("complete", models.BooleanField(default=False)),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("active", "Active"),
("complete", "Complete"),
("stopped", "Stopped"),
("failed", "Failed"),
],
default="pending",
max_length=50,
null=True,
),
),
(
"parent_job",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="child_jobs",
to="bookwyrm.parentjob",
),
),
],
options={
"abstract": False,
},
),
migrations.AddField(
model_name="notification",
name="related_user_export",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="bookwyrm.bookwyrmexportjob",
),
),
]

View file

@ -0,0 +1,45 @@
# Generated by Django 3.2.23 on 2023-12-12 23:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0188_theme_loads"),
]
operations = [
migrations.AlterField(
model_name="user",
name="preferred_language",
field=models.CharField(
blank=True,
choices=[
("en-us", "English"),
("ca-es", "Català (Catalan)"),
("de-de", "Deutsch (German)"),
("eo-uy", "Esperanto (Esperanto)"),
("es-es", "Español (Spanish)"),
("eu-es", "Euskara (Basque)"),
("gl-es", "Galego (Galician)"),
("it-it", "Italiano (Italian)"),
("fi-fi", "Suomi (Finnish)"),
("fr-fr", "Français (French)"),
("lt-lt", "Lietuvių (Lithuanian)"),
("nl-nl", "Nederlands (Dutch)"),
("no-no", "Norsk (Norwegian)"),
("pl-pl", "Polski (Polish)"),
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
("pt-pt", "Português Europeu (European Portuguese)"),
("ro-ro", "Română (Romanian)"),
("sv-se", "Svenska (Swedish)"),
("uk-ua", "Українська (Ukrainian)"),
("zh-hans", "简体中文 (Simplified Chinese)"),
("zh-hant", "繁體中文 (Traditional Chinese)"),
],
max_length=255,
null=True,
),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2023-11-22 10:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0186_auto_20231116_0048"),
("bookwyrm", "0188_theme_loads"),
]
operations = []

View file

@ -0,0 +1,45 @@
# Generated by Django 3.2.23 on 2023-11-23 19:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0189_merge_0186_auto_20231116_0048_0188_theme_loads"),
]
operations = [
migrations.AlterField(
model_name="notification",
name="notification_type",
field=models.CharField(
choices=[
("FAVORITE", "Favorite"),
("BOOST", "Boost"),
("REPLY", "Reply"),
("MENTION", "Mention"),
("TAG", "Tag"),
("FOLLOW", "Follow"),
("FOLLOW_REQUEST", "Follow Request"),
("IMPORT", "Import"),
("USER_IMPORT", "User Import"),
("USER_EXPORT", "User Export"),
("ADD", "Add"),
("REPORT", "Report"),
("LINK_DOMAIN", "Link Domain"),
("INVITE_REQUEST", "Invite Request"),
("INVITE", "Invite"),
("ACCEPT", "Accept"),
("JOIN", "Join"),
("LEAVE", "Leave"),
("REMOVE", "Remove"),
("GROUP_PRIVACY", "Group Privacy"),
("GROUP_NAME", "Group Name"),
("GROUP_DESCRIPTION", "Group Description"),
("MOVE", "Move"),
],
max_length=255,
),
),
]

View file

@ -0,0 +1,16 @@
# Generated by Django 3.2.20 on 2023-11-24 17:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0188_theme_loads"),
]
operations = [
migrations.RemoveIndex(
model_name="author",
name="bookwyrm_au_search__b050a8_gin",
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-01-02 03:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0189_alter_user_preferred_language"),
("bookwyrm", "0190_alter_notification_notification_type"),
]
operations = []

View file

@ -0,0 +1,76 @@
# Generated by Django 3.2.20 on 2023-11-25 00:47
from importlib import import_module
import re
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations
trigger_migration = import_module("bookwyrm.migrations.0077_auto_20210623_2155")
# it's _very_ convenient for development that this migration be reversible
search_vector_trigger = trigger_migration.Migration.operations[4]
author_search_vector_trigger = trigger_migration.Migration.operations[5]
assert re.search(r"\bCREATE TRIGGER search_vector_trigger\b", search_vector_trigger.sql)
assert re.search(
r"\bCREATE TRIGGER author_search_vector_trigger\b",
author_search_vector_trigger.sql,
)
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0190_book_search_updates"),
]
operations = [
pgtrigger.migrations.AddTrigger(
model_name="book",
trigger=pgtrigger.compiler.Trigger(
name="update_search_vector_on_book_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="new.search_vector := setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C') FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D');RETURN NEW;",
hash="77d6399497c0a89b0bf09d296e33c396da63705c",
operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
table="bookwyrm_book",
when="BEFORE",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="author",
trigger=pgtrigger.compiler.Trigger(
name="reset_search_vector_on_author_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
hash="e7bbf08711ff3724c58f4d92fb7a082ffb3d7826",
operation='UPDATE OF "name"',
pgid="pgtrigger_reset_search_vector_on_author_edit_a447c",
table="bookwyrm_author",
when="AFTER",
),
),
),
migrations.RunSQL(
sql="""DROP TRIGGER IF EXISTS search_vector_trigger ON bookwyrm_book;
DROP FUNCTION IF EXISTS book_trigger;
""",
reverse_sql=search_vector_trigger.sql,
),
migrations.RunSQL(
sql="""DROP TRIGGER IF EXISTS author_search_vector_trigger ON bookwyrm_author;
DROP FUNCTION IF EXISTS author_trigger;
""",
reverse_sql=author_search_vector_trigger.sql,
),
migrations.RunSQL(
# Recalculate book search vector for any missed author name changes
# due to bug in JOIN in the old trigger.
sql="UPDATE bookwyrm_book SET search_vector = NULL;",
reverse_sql=migrations.RunSQL.noop,
),
]

View file

@ -0,0 +1,23 @@
# Generated by Django 3.2.23 on 2024-01-04 23:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_merge_20240102_0326"),
]
operations = [
migrations.AlterField(
model_name="quotation",
name="endposition",
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="quotation",
name="position",
field=models.TextField(blank=True, null=True),
),
]

View file

@ -0,0 +1,18 @@
# Generated by Django 3.2.23 on 2024-01-02 19:36
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_merge_20240102_0326"),
]
operations = [
migrations.RenameField(
model_name="sitesettings",
old_name="version",
new_name="available_version",
),
]

View file

@ -0,0 +1,18 @@
# Generated by Django 3.2.23 on 2024-01-16 10:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_merge_20240102_0326"),
]
operations = [
migrations.AddField(
model_name="sitesettings",
name="user_exports_enabled",
field=models.BooleanField(default=False),
),
]

View file

@ -0,0 +1,92 @@
# Generated by Django 3.2.23 on 2024-01-28 02:49
import django.core.serializers.json
from django.db import migrations, models
import django.db.models.deletion
from django.core.files.storage import storages
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0192_sitesettings_user_exports_enabled"),
]
operations = [
migrations.AddField(
model_name="bookwyrmexportjob",
name="export_json",
field=models.JSONField(
encoder=django.core.serializers.json.DjangoJSONEncoder, null=True
),
),
migrations.AddField(
model_name="bookwyrmexportjob",
name="json_completed",
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name="bookwyrmexportjob",
name="export_data",
field=models.FileField(
null=True,
storage=storages["exports"],
upload_to="",
),
),
migrations.CreateModel(
name="AddFileToTar",
fields=[
(
"childjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.childjob",
),
),
(
"parent_export_job",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="child_edition_export_jobs",
to="bookwyrm.bookwyrmexportjob",
),
),
],
options={
"abstract": False,
},
bases=("bookwyrm.childjob",),
),
migrations.CreateModel(
name="AddBookToUserExportJob",
fields=[
(
"childjob_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="bookwyrm.childjob",
),
),
(
"edition",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="bookwyrm.edition",
),
),
],
options={
"abstract": False,
},
bases=("bookwyrm.childjob",),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 15:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0192_make_page_positions_text"),
("bookwyrm", "0192_sitesettings_user_exports_enabled"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 16:19
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0192_rename_version_sitesettings_available_version"),
("bookwyrm", "0193_merge_20240203_1539"),
]
operations = []

View file

@ -0,0 +1,46 @@
# Generated by Django 3.2.23 on 2024-02-21 00:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0194_merge_20240203_1619"),
]
operations = [
migrations.AlterField(
model_name="user",
name="preferred_language",
field=models.CharField(
blank=True,
choices=[
("en-us", "English"),
("ca-es", "Català (Catalan)"),
("de-de", "Deutsch (German)"),
("eo-uy", "Esperanto (Esperanto)"),
("es-es", "Español (Spanish)"),
("eu-es", "Euskara (Basque)"),
("gl-es", "Galego (Galician)"),
("it-it", "Italiano (Italian)"),
("ko-kr", "한국어 (Korean)"),
("fi-fi", "Suomi (Finnish)"),
("fr-fr", "Français (French)"),
("lt-lt", "Lietuvių (Lithuanian)"),
("nl-nl", "Nederlands (Dutch)"),
("no-no", "Norsk (Norwegian)"),
("pl-pl", "Polski (Polish)"),
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
("pt-pt", "Português Europeu (European Portuguese)"),
("ro-ro", "Română (Romanian)"),
("sv-se", "Svenska (Swedish)"),
("uk-ua", "Українська (Ukrainian)"),
("zh-hans", "简体中文 (Simplified Chinese)"),
("zh-hant", "繁體中文 (Traditional Chinese)"),
],
max_length=255,
null=True,
),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 17:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0193_auto_20240128_0249"),
("bookwyrm", "0195_alter_user_preferred_language"),
]
operations = []

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 00:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0191_migrate_search_vec_triggers_to_pgtriggers"),
("bookwyrm", "0195_alter_user_preferred_language"),
]
operations = []

View file

@ -0,0 +1,41 @@
# Generated by Django 3.2.25 on 2024-03-20 15:15
import django.contrib.postgres.indexes
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0196_merge_pr3134_into_main"),
]
operations = [
migrations.AddIndex(
model_name="author",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin"
),
),
pgtrigger.migrations.AddTrigger(
model_name="author",
trigger=pgtrigger.compiler.Trigger(
name="update_search_vector_on_author_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="new.search_vector := setweight(to_tsvector('simple', new.name), 'A') || setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');RETURN NEW;",
hash="b97919016236d74d0ade51a0769a173ea269da64",
operation='INSERT OR UPDATE OF "name", "aliases", "search_vector"',
pgid="pgtrigger_update_search_vector_on_author_edit_c61cb",
table="bookwyrm_author",
when="BEFORE",
),
),
),
migrations.RunSQL(
# Calculate search vector for all Authors.
sql="UPDATE bookwyrm_author SET search_vector = NULL;",
reverse_sql="UPDATE bookwyrm_author SET search_vector = NULL;",
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-24 02:35
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0196_merge_20240318_1737"),
("bookwyrm", "0196_merge_pr3134_into_main"),
]
operations = []

View file

@ -0,0 +1,48 @@
# Generated by Django 3.2.24 on 2024-02-28 21:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0196_merge_pr3134_into_main"),
]
operations = [
migrations.CreateModel(
name="MergedBook",
fields=[
("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
(
"merged_into",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="absorbed",
to="bookwyrm.book",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="MergedAuthor",
fields=[
("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
(
"merged_into",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="absorbed",
to="bookwyrm.author",
),
),
],
options={
"abstract": False,
},
),
]

View file

@ -0,0 +1,23 @@
# Generated by Django 3.2.25 on 2024-03-26 11:37
import bookwyrm.models.bookwyrm_export_job
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_merge_20240324_0235"),
]
operations = [
migrations.AlterField(
model_name="bookwyrmexportjob",
name="export_data",
field=models.FileField(
null=True,
storage=bookwyrm.models.bookwyrm_export_job.select_exports_storage,
upload_to="",
),
),
]

View file

@ -0,0 +1,57 @@
# Generated by Django 3.2.25 on 2024-03-20 15:52
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_author_search_vector"),
]
operations = [
pgtrigger.migrations.RemoveTrigger(
model_name="author",
name="reset_search_vector_on_author_edit",
),
pgtrigger.migrations.RemoveTrigger(
model_name="book",
name="update_search_vector_on_book_edit",
),
pgtrigger.migrations.AddTrigger(
model_name="author",
trigger=pgtrigger.compiler.Trigger(
name="reset_book_search_vector_on_author_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
hash="68422c0f29879c5802b82159dde45297eff53e73",
operation='UPDATE OF "name", "aliases"',
pgid="pgtrigger_reset_book_search_vector_on_author_edit_a50c7",
table="bookwyrm_author",
when="AFTER",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="book",
trigger=pgtrigger.compiler.Trigger(
name="update_search_vector_on_book_edit",
sql=pgtrigger.compiler.UpsertTriggerSql(
func="WITH author_names AS (SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) SELECT setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(name_and_aliases), ' '), '')), 'C') FROM author_names) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D') INTO new.search_vector;RETURN NEW;",
hash="9324f5ca76a6f5e63931881d62d11da11f595b2c",
operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
table="bookwyrm_book",
when="BEFORE",
),
),
),
migrations.RunSQL(
# Recalculate search vector for all Books because it now includes
# Author aliases.
sql="UPDATE bookwyrm_book SET search_vector = NULL;",
reverse_sql="UPDATE bookwyrm_book SET search_vector = NULL;",
),
]

View file

@ -0,0 +1,70 @@
# Generated by Django 4.2.11 on 2024-03-29 19:25
import bookwyrm.models.fields
from django.conf import settings
from django.db import migrations
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0198_book_search_vector_author_aliases"),
]
operations = [
migrations.AlterField(
model_name="userblocks",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userblocks",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollowrequest",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollowrequest",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollows",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollows",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="%(class)s_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
]

View file

@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-26 12:17
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0198_alter_bookwyrmexportjob_export_data"),
("bookwyrm", "0198_book_search_vector_author_aliases"),
]
operations = []

View file

@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-02 19:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0198_book_search_vector_author_aliases"),
]
operations = [
migrations.AddIndex(
model_name="status",
index=models.Index(
fields=["remote_id"], name="bookwyrm_st_remote__06aeba_idx"
),
),
]

View file

@ -0,0 +1,633 @@
# Generated by Django 4.2.11 on 2024-04-01 20:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0199_alter_userblocks_user_object_and_more"),
]
operations = [
migrations.AlterField(
model_name="user",
name="preferred_timezone",
field=models.CharField(
choices=[
("Africa/Abidjan", "Africa/Abidjan"),
("Africa/Accra", "Africa/Accra"),
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
("Africa/Algiers", "Africa/Algiers"),
("Africa/Asmara", "Africa/Asmara"),
("Africa/Asmera", "Africa/Asmera"),
("Africa/Bamako", "Africa/Bamako"),
("Africa/Bangui", "Africa/Bangui"),
("Africa/Banjul", "Africa/Banjul"),
("Africa/Bissau", "Africa/Bissau"),
("Africa/Blantyre", "Africa/Blantyre"),
("Africa/Brazzaville", "Africa/Brazzaville"),
("Africa/Bujumbura", "Africa/Bujumbura"),
("Africa/Cairo", "Africa/Cairo"),
("Africa/Casablanca", "Africa/Casablanca"),
("Africa/Ceuta", "Africa/Ceuta"),
("Africa/Conakry", "Africa/Conakry"),
("Africa/Dakar", "Africa/Dakar"),
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
("Africa/Djibouti", "Africa/Djibouti"),
("Africa/Douala", "Africa/Douala"),
("Africa/El_Aaiun", "Africa/El_Aaiun"),
("Africa/Freetown", "Africa/Freetown"),
("Africa/Gaborone", "Africa/Gaborone"),
("Africa/Harare", "Africa/Harare"),
("Africa/Johannesburg", "Africa/Johannesburg"),
("Africa/Juba", "Africa/Juba"),
("Africa/Kampala", "Africa/Kampala"),
("Africa/Khartoum", "Africa/Khartoum"),
("Africa/Kigali", "Africa/Kigali"),
("Africa/Kinshasa", "Africa/Kinshasa"),
("Africa/Lagos", "Africa/Lagos"),
("Africa/Libreville", "Africa/Libreville"),
("Africa/Lome", "Africa/Lome"),
("Africa/Luanda", "Africa/Luanda"),
("Africa/Lubumbashi", "Africa/Lubumbashi"),
("Africa/Lusaka", "Africa/Lusaka"),
("Africa/Malabo", "Africa/Malabo"),
("Africa/Maputo", "Africa/Maputo"),
("Africa/Maseru", "Africa/Maseru"),
("Africa/Mbabane", "Africa/Mbabane"),
("Africa/Mogadishu", "Africa/Mogadishu"),
("Africa/Monrovia", "Africa/Monrovia"),
("Africa/Nairobi", "Africa/Nairobi"),
("Africa/Ndjamena", "Africa/Ndjamena"),
("Africa/Niamey", "Africa/Niamey"),
("Africa/Nouakchott", "Africa/Nouakchott"),
("Africa/Ouagadougou", "Africa/Ouagadougou"),
("Africa/Porto-Novo", "Africa/Porto-Novo"),
("Africa/Sao_Tome", "Africa/Sao_Tome"),
("Africa/Timbuktu", "Africa/Timbuktu"),
("Africa/Tripoli", "Africa/Tripoli"),
("Africa/Tunis", "Africa/Tunis"),
("Africa/Windhoek", "Africa/Windhoek"),
("America/Adak", "America/Adak"),
("America/Anchorage", "America/Anchorage"),
("America/Anguilla", "America/Anguilla"),
("America/Antigua", "America/Antigua"),
("America/Araguaina", "America/Araguaina"),
(
"America/Argentina/Buenos_Aires",
"America/Argentina/Buenos_Aires",
),
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
(
"America/Argentina/ComodRivadavia",
"America/Argentina/ComodRivadavia",
),
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
(
"America/Argentina/Rio_Gallegos",
"America/Argentina/Rio_Gallegos",
),
("America/Argentina/Salta", "America/Argentina/Salta"),
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
("America/Aruba", "America/Aruba"),
("America/Asuncion", "America/Asuncion"),
("America/Atikokan", "America/Atikokan"),
("America/Atka", "America/Atka"),
("America/Bahia", "America/Bahia"),
("America/Bahia_Banderas", "America/Bahia_Banderas"),
("America/Barbados", "America/Barbados"),
("America/Belem", "America/Belem"),
("America/Belize", "America/Belize"),
("America/Blanc-Sablon", "America/Blanc-Sablon"),
("America/Boa_Vista", "America/Boa_Vista"),
("America/Bogota", "America/Bogota"),
("America/Boise", "America/Boise"),
("America/Buenos_Aires", "America/Buenos_Aires"),
("America/Cambridge_Bay", "America/Cambridge_Bay"),
("America/Campo_Grande", "America/Campo_Grande"),
("America/Cancun", "America/Cancun"),
("America/Caracas", "America/Caracas"),
("America/Catamarca", "America/Catamarca"),
("America/Cayenne", "America/Cayenne"),
("America/Cayman", "America/Cayman"),
("America/Chicago", "America/Chicago"),
("America/Chihuahua", "America/Chihuahua"),
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
("America/Coral_Harbour", "America/Coral_Harbour"),
("America/Cordoba", "America/Cordoba"),
("America/Costa_Rica", "America/Costa_Rica"),
("America/Creston", "America/Creston"),
("America/Cuiaba", "America/Cuiaba"),
("America/Curacao", "America/Curacao"),
("America/Danmarkshavn", "America/Danmarkshavn"),
("America/Dawson", "America/Dawson"),
("America/Dawson_Creek", "America/Dawson_Creek"),
("America/Denver", "America/Denver"),
("America/Detroit", "America/Detroit"),
("America/Dominica", "America/Dominica"),
("America/Edmonton", "America/Edmonton"),
("America/Eirunepe", "America/Eirunepe"),
("America/El_Salvador", "America/El_Salvador"),
("America/Ensenada", "America/Ensenada"),
("America/Fort_Nelson", "America/Fort_Nelson"),
("America/Fort_Wayne", "America/Fort_Wayne"),
("America/Fortaleza", "America/Fortaleza"),
("America/Glace_Bay", "America/Glace_Bay"),
("America/Godthab", "America/Godthab"),
("America/Goose_Bay", "America/Goose_Bay"),
("America/Grand_Turk", "America/Grand_Turk"),
("America/Grenada", "America/Grenada"),
("America/Guadeloupe", "America/Guadeloupe"),
("America/Guatemala", "America/Guatemala"),
("America/Guayaquil", "America/Guayaquil"),
("America/Guyana", "America/Guyana"),
("America/Halifax", "America/Halifax"),
("America/Havana", "America/Havana"),
("America/Hermosillo", "America/Hermosillo"),
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
("America/Indiana/Knox", "America/Indiana/Knox"),
("America/Indiana/Marengo", "America/Indiana/Marengo"),
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
("America/Indiana/Vevay", "America/Indiana/Vevay"),
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
("America/Indiana/Winamac", "America/Indiana/Winamac"),
("America/Indianapolis", "America/Indianapolis"),
("America/Inuvik", "America/Inuvik"),
("America/Iqaluit", "America/Iqaluit"),
("America/Jamaica", "America/Jamaica"),
("America/Jujuy", "America/Jujuy"),
("America/Juneau", "America/Juneau"),
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
("America/Knox_IN", "America/Knox_IN"),
("America/Kralendijk", "America/Kralendijk"),
("America/La_Paz", "America/La_Paz"),
("America/Lima", "America/Lima"),
("America/Los_Angeles", "America/Los_Angeles"),
("America/Louisville", "America/Louisville"),
("America/Lower_Princes", "America/Lower_Princes"),
("America/Maceio", "America/Maceio"),
("America/Managua", "America/Managua"),
("America/Manaus", "America/Manaus"),
("America/Marigot", "America/Marigot"),
("America/Martinique", "America/Martinique"),
("America/Matamoros", "America/Matamoros"),
("America/Mazatlan", "America/Mazatlan"),
("America/Mendoza", "America/Mendoza"),
("America/Menominee", "America/Menominee"),
("America/Merida", "America/Merida"),
("America/Metlakatla", "America/Metlakatla"),
("America/Mexico_City", "America/Mexico_City"),
("America/Miquelon", "America/Miquelon"),
("America/Moncton", "America/Moncton"),
("America/Monterrey", "America/Monterrey"),
("America/Montevideo", "America/Montevideo"),
("America/Montreal", "America/Montreal"),
("America/Montserrat", "America/Montserrat"),
("America/Nassau", "America/Nassau"),
("America/New_York", "America/New_York"),
("America/Nipigon", "America/Nipigon"),
("America/Nome", "America/Nome"),
("America/Noronha", "America/Noronha"),
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
("America/North_Dakota/Center", "America/North_Dakota/Center"),
(
"America/North_Dakota/New_Salem",
"America/North_Dakota/New_Salem",
),
("America/Nuuk", "America/Nuuk"),
("America/Ojinaga", "America/Ojinaga"),
("America/Panama", "America/Panama"),
("America/Pangnirtung", "America/Pangnirtung"),
("America/Paramaribo", "America/Paramaribo"),
("America/Phoenix", "America/Phoenix"),
("America/Port-au-Prince", "America/Port-au-Prince"),
("America/Port_of_Spain", "America/Port_of_Spain"),
("America/Porto_Acre", "America/Porto_Acre"),
("America/Porto_Velho", "America/Porto_Velho"),
("America/Puerto_Rico", "America/Puerto_Rico"),
("America/Punta_Arenas", "America/Punta_Arenas"),
("America/Rainy_River", "America/Rainy_River"),
("America/Rankin_Inlet", "America/Rankin_Inlet"),
("America/Recife", "America/Recife"),
("America/Regina", "America/Regina"),
("America/Resolute", "America/Resolute"),
("America/Rio_Branco", "America/Rio_Branco"),
("America/Rosario", "America/Rosario"),
("America/Santa_Isabel", "America/Santa_Isabel"),
("America/Santarem", "America/Santarem"),
("America/Santiago", "America/Santiago"),
("America/Santo_Domingo", "America/Santo_Domingo"),
("America/Sao_Paulo", "America/Sao_Paulo"),
("America/Scoresbysund", "America/Scoresbysund"),
("America/Shiprock", "America/Shiprock"),
("America/Sitka", "America/Sitka"),
("America/St_Barthelemy", "America/St_Barthelemy"),
("America/St_Johns", "America/St_Johns"),
("America/St_Kitts", "America/St_Kitts"),
("America/St_Lucia", "America/St_Lucia"),
("America/St_Thomas", "America/St_Thomas"),
("America/St_Vincent", "America/St_Vincent"),
("America/Swift_Current", "America/Swift_Current"),
("America/Tegucigalpa", "America/Tegucigalpa"),
("America/Thule", "America/Thule"),
("America/Thunder_Bay", "America/Thunder_Bay"),
("America/Tijuana", "America/Tijuana"),
("America/Toronto", "America/Toronto"),
("America/Tortola", "America/Tortola"),
("America/Vancouver", "America/Vancouver"),
("America/Virgin", "America/Virgin"),
("America/Whitehorse", "America/Whitehorse"),
("America/Winnipeg", "America/Winnipeg"),
("America/Yakutat", "America/Yakutat"),
("America/Yellowknife", "America/Yellowknife"),
("Antarctica/Casey", "Antarctica/Casey"),
("Antarctica/Davis", "Antarctica/Davis"),
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
("Antarctica/Macquarie", "Antarctica/Macquarie"),
("Antarctica/Mawson", "Antarctica/Mawson"),
("Antarctica/McMurdo", "Antarctica/McMurdo"),
("Antarctica/Palmer", "Antarctica/Palmer"),
("Antarctica/Rothera", "Antarctica/Rothera"),
("Antarctica/South_Pole", "Antarctica/South_Pole"),
("Antarctica/Syowa", "Antarctica/Syowa"),
("Antarctica/Troll", "Antarctica/Troll"),
("Antarctica/Vostok", "Antarctica/Vostok"),
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
("Asia/Aden", "Asia/Aden"),
("Asia/Almaty", "Asia/Almaty"),
("Asia/Amman", "Asia/Amman"),
("Asia/Anadyr", "Asia/Anadyr"),
("Asia/Aqtau", "Asia/Aqtau"),
("Asia/Aqtobe", "Asia/Aqtobe"),
("Asia/Ashgabat", "Asia/Ashgabat"),
("Asia/Ashkhabad", "Asia/Ashkhabad"),
("Asia/Atyrau", "Asia/Atyrau"),
("Asia/Baghdad", "Asia/Baghdad"),
("Asia/Bahrain", "Asia/Bahrain"),
("Asia/Baku", "Asia/Baku"),
("Asia/Bangkok", "Asia/Bangkok"),
("Asia/Barnaul", "Asia/Barnaul"),
("Asia/Beirut", "Asia/Beirut"),
("Asia/Bishkek", "Asia/Bishkek"),
("Asia/Brunei", "Asia/Brunei"),
("Asia/Calcutta", "Asia/Calcutta"),
("Asia/Chita", "Asia/Chita"),
("Asia/Choibalsan", "Asia/Choibalsan"),
("Asia/Chongqing", "Asia/Chongqing"),
("Asia/Chungking", "Asia/Chungking"),
("Asia/Colombo", "Asia/Colombo"),
("Asia/Dacca", "Asia/Dacca"),
("Asia/Damascus", "Asia/Damascus"),
("Asia/Dhaka", "Asia/Dhaka"),
("Asia/Dili", "Asia/Dili"),
("Asia/Dubai", "Asia/Dubai"),
("Asia/Dushanbe", "Asia/Dushanbe"),
("Asia/Famagusta", "Asia/Famagusta"),
("Asia/Gaza", "Asia/Gaza"),
("Asia/Harbin", "Asia/Harbin"),
("Asia/Hebron", "Asia/Hebron"),
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
("Asia/Hong_Kong", "Asia/Hong_Kong"),
("Asia/Hovd", "Asia/Hovd"),
("Asia/Irkutsk", "Asia/Irkutsk"),
("Asia/Istanbul", "Asia/Istanbul"),
("Asia/Jakarta", "Asia/Jakarta"),
("Asia/Jayapura", "Asia/Jayapura"),
("Asia/Jerusalem", "Asia/Jerusalem"),
("Asia/Kabul", "Asia/Kabul"),
("Asia/Kamchatka", "Asia/Kamchatka"),
("Asia/Karachi", "Asia/Karachi"),
("Asia/Kashgar", "Asia/Kashgar"),
("Asia/Kathmandu", "Asia/Kathmandu"),
("Asia/Katmandu", "Asia/Katmandu"),
("Asia/Khandyga", "Asia/Khandyga"),
("Asia/Kolkata", "Asia/Kolkata"),
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
("Asia/Kuching", "Asia/Kuching"),
("Asia/Kuwait", "Asia/Kuwait"),
("Asia/Macao", "Asia/Macao"),
("Asia/Macau", "Asia/Macau"),
("Asia/Magadan", "Asia/Magadan"),
("Asia/Makassar", "Asia/Makassar"),
("Asia/Manila", "Asia/Manila"),
("Asia/Muscat", "Asia/Muscat"),
("Asia/Nicosia", "Asia/Nicosia"),
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
("Asia/Novosibirsk", "Asia/Novosibirsk"),
("Asia/Omsk", "Asia/Omsk"),
("Asia/Oral", "Asia/Oral"),
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
("Asia/Pontianak", "Asia/Pontianak"),
("Asia/Pyongyang", "Asia/Pyongyang"),
("Asia/Qatar", "Asia/Qatar"),
("Asia/Qostanay", "Asia/Qostanay"),
("Asia/Qyzylorda", "Asia/Qyzylorda"),
("Asia/Rangoon", "Asia/Rangoon"),
("Asia/Riyadh", "Asia/Riyadh"),
("Asia/Saigon", "Asia/Saigon"),
("Asia/Sakhalin", "Asia/Sakhalin"),
("Asia/Samarkand", "Asia/Samarkand"),
("Asia/Seoul", "Asia/Seoul"),
("Asia/Shanghai", "Asia/Shanghai"),
("Asia/Singapore", "Asia/Singapore"),
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
("Asia/Taipei", "Asia/Taipei"),
("Asia/Tashkent", "Asia/Tashkent"),
("Asia/Tbilisi", "Asia/Tbilisi"),
("Asia/Tehran", "Asia/Tehran"),
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
("Asia/Thimbu", "Asia/Thimbu"),
("Asia/Thimphu", "Asia/Thimphu"),
("Asia/Tokyo", "Asia/Tokyo"),
("Asia/Tomsk", "Asia/Tomsk"),
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
("Asia/Urumqi", "Asia/Urumqi"),
("Asia/Ust-Nera", "Asia/Ust-Nera"),
("Asia/Vientiane", "Asia/Vientiane"),
("Asia/Vladivostok", "Asia/Vladivostok"),
("Asia/Yakutsk", "Asia/Yakutsk"),
("Asia/Yangon", "Asia/Yangon"),
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
("Asia/Yerevan", "Asia/Yerevan"),
("Atlantic/Azores", "Atlantic/Azores"),
("Atlantic/Bermuda", "Atlantic/Bermuda"),
("Atlantic/Canary", "Atlantic/Canary"),
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
("Atlantic/Faeroe", "Atlantic/Faeroe"),
("Atlantic/Faroe", "Atlantic/Faroe"),
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
("Atlantic/Madeira", "Atlantic/Madeira"),
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
("Atlantic/St_Helena", "Atlantic/St_Helena"),
("Atlantic/Stanley", "Atlantic/Stanley"),
("Australia/ACT", "Australia/ACT"),
("Australia/Adelaide", "Australia/Adelaide"),
("Australia/Brisbane", "Australia/Brisbane"),
("Australia/Broken_Hill", "Australia/Broken_Hill"),
("Australia/Canberra", "Australia/Canberra"),
("Australia/Currie", "Australia/Currie"),
("Australia/Darwin", "Australia/Darwin"),
("Australia/Eucla", "Australia/Eucla"),
("Australia/Hobart", "Australia/Hobart"),
("Australia/LHI", "Australia/LHI"),
("Australia/Lindeman", "Australia/Lindeman"),
("Australia/Lord_Howe", "Australia/Lord_Howe"),
("Australia/Melbourne", "Australia/Melbourne"),
("Australia/NSW", "Australia/NSW"),
("Australia/North", "Australia/North"),
("Australia/Perth", "Australia/Perth"),
("Australia/Queensland", "Australia/Queensland"),
("Australia/South", "Australia/South"),
("Australia/Sydney", "Australia/Sydney"),
("Australia/Tasmania", "Australia/Tasmania"),
("Australia/Victoria", "Australia/Victoria"),
("Australia/West", "Australia/West"),
("Australia/Yancowinna", "Australia/Yancowinna"),
("Brazil/Acre", "Brazil/Acre"),
("Brazil/DeNoronha", "Brazil/DeNoronha"),
("Brazil/East", "Brazil/East"),
("Brazil/West", "Brazil/West"),
("CET", "CET"),
("CST6CDT", "CST6CDT"),
("Canada/Atlantic", "Canada/Atlantic"),
("Canada/Central", "Canada/Central"),
("Canada/Eastern", "Canada/Eastern"),
("Canada/Mountain", "Canada/Mountain"),
("Canada/Newfoundland", "Canada/Newfoundland"),
("Canada/Pacific", "Canada/Pacific"),
("Canada/Saskatchewan", "Canada/Saskatchewan"),
("Canada/Yukon", "Canada/Yukon"),
("Chile/Continental", "Chile/Continental"),
("Chile/EasterIsland", "Chile/EasterIsland"),
("Cuba", "Cuba"),
("EET", "EET"),
("EST", "EST"),
("EST5EDT", "EST5EDT"),
("Egypt", "Egypt"),
("Eire", "Eire"),
("Etc/GMT", "Etc/GMT"),
("Etc/GMT+0", "Etc/GMT+0"),
("Etc/GMT+1", "Etc/GMT+1"),
("Etc/GMT+10", "Etc/GMT+10"),
("Etc/GMT+11", "Etc/GMT+11"),
("Etc/GMT+12", "Etc/GMT+12"),
("Etc/GMT+2", "Etc/GMT+2"),
("Etc/GMT+3", "Etc/GMT+3"),
("Etc/GMT+4", "Etc/GMT+4"),
("Etc/GMT+5", "Etc/GMT+5"),
("Etc/GMT+6", "Etc/GMT+6"),
("Etc/GMT+7", "Etc/GMT+7"),
("Etc/GMT+8", "Etc/GMT+8"),
("Etc/GMT+9", "Etc/GMT+9"),
("Etc/GMT-0", "Etc/GMT-0"),
("Etc/GMT-1", "Etc/GMT-1"),
("Etc/GMT-10", "Etc/GMT-10"),
("Etc/GMT-11", "Etc/GMT-11"),
("Etc/GMT-12", "Etc/GMT-12"),
("Etc/GMT-13", "Etc/GMT-13"),
("Etc/GMT-14", "Etc/GMT-14"),
("Etc/GMT-2", "Etc/GMT-2"),
("Etc/GMT-3", "Etc/GMT-3"),
("Etc/GMT-4", "Etc/GMT-4"),
("Etc/GMT-5", "Etc/GMT-5"),
("Etc/GMT-6", "Etc/GMT-6"),
("Etc/GMT-7", "Etc/GMT-7"),
("Etc/GMT-8", "Etc/GMT-8"),
("Etc/GMT-9", "Etc/GMT-9"),
("Etc/GMT0", "Etc/GMT0"),
("Etc/Greenwich", "Etc/Greenwich"),
("Etc/UCT", "Etc/UCT"),
("Etc/UTC", "Etc/UTC"),
("Etc/Universal", "Etc/Universal"),
("Etc/Zulu", "Etc/Zulu"),
("Europe/Amsterdam", "Europe/Amsterdam"),
("Europe/Andorra", "Europe/Andorra"),
("Europe/Astrakhan", "Europe/Astrakhan"),
("Europe/Athens", "Europe/Athens"),
("Europe/Belfast", "Europe/Belfast"),
("Europe/Belgrade", "Europe/Belgrade"),
("Europe/Berlin", "Europe/Berlin"),
("Europe/Bratislava", "Europe/Bratislava"),
("Europe/Brussels", "Europe/Brussels"),
("Europe/Bucharest", "Europe/Bucharest"),
("Europe/Budapest", "Europe/Budapest"),
("Europe/Busingen", "Europe/Busingen"),
("Europe/Chisinau", "Europe/Chisinau"),
("Europe/Copenhagen", "Europe/Copenhagen"),
("Europe/Dublin", "Europe/Dublin"),
("Europe/Gibraltar", "Europe/Gibraltar"),
("Europe/Guernsey", "Europe/Guernsey"),
("Europe/Helsinki", "Europe/Helsinki"),
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
("Europe/Istanbul", "Europe/Istanbul"),
("Europe/Jersey", "Europe/Jersey"),
("Europe/Kaliningrad", "Europe/Kaliningrad"),
("Europe/Kiev", "Europe/Kiev"),
("Europe/Kirov", "Europe/Kirov"),
("Europe/Kyiv", "Europe/Kyiv"),
("Europe/Lisbon", "Europe/Lisbon"),
("Europe/Ljubljana", "Europe/Ljubljana"),
("Europe/London", "Europe/London"),
("Europe/Luxembourg", "Europe/Luxembourg"),
("Europe/Madrid", "Europe/Madrid"),
("Europe/Malta", "Europe/Malta"),
("Europe/Mariehamn", "Europe/Mariehamn"),
("Europe/Minsk", "Europe/Minsk"),
("Europe/Monaco", "Europe/Monaco"),
("Europe/Moscow", "Europe/Moscow"),
("Europe/Nicosia", "Europe/Nicosia"),
("Europe/Oslo", "Europe/Oslo"),
("Europe/Paris", "Europe/Paris"),
("Europe/Podgorica", "Europe/Podgorica"),
("Europe/Prague", "Europe/Prague"),
("Europe/Riga", "Europe/Riga"),
("Europe/Rome", "Europe/Rome"),
("Europe/Samara", "Europe/Samara"),
("Europe/San_Marino", "Europe/San_Marino"),
("Europe/Sarajevo", "Europe/Sarajevo"),
("Europe/Saratov", "Europe/Saratov"),
("Europe/Simferopol", "Europe/Simferopol"),
("Europe/Skopje", "Europe/Skopje"),
("Europe/Sofia", "Europe/Sofia"),
("Europe/Stockholm", "Europe/Stockholm"),
("Europe/Tallinn", "Europe/Tallinn"),
("Europe/Tirane", "Europe/Tirane"),
("Europe/Tiraspol", "Europe/Tiraspol"),
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
("Europe/Uzhgorod", "Europe/Uzhgorod"),
("Europe/Vaduz", "Europe/Vaduz"),
("Europe/Vatican", "Europe/Vatican"),
("Europe/Vienna", "Europe/Vienna"),
("Europe/Vilnius", "Europe/Vilnius"),
("Europe/Volgograd", "Europe/Volgograd"),
("Europe/Warsaw", "Europe/Warsaw"),
("Europe/Zagreb", "Europe/Zagreb"),
("Europe/Zaporozhye", "Europe/Zaporozhye"),
("Europe/Zurich", "Europe/Zurich"),
("Factory", "Factory"),
("GB", "GB"),
("GB-Eire", "GB-Eire"),
("GMT", "GMT"),
("GMT+0", "GMT+0"),
("GMT-0", "GMT-0"),
("GMT0", "GMT0"),
("Greenwich", "Greenwich"),
("HST", "HST"),
("Hongkong", "Hongkong"),
("Iceland", "Iceland"),
("Indian/Antananarivo", "Indian/Antananarivo"),
("Indian/Chagos", "Indian/Chagos"),
("Indian/Christmas", "Indian/Christmas"),
("Indian/Cocos", "Indian/Cocos"),
("Indian/Comoro", "Indian/Comoro"),
("Indian/Kerguelen", "Indian/Kerguelen"),
("Indian/Mahe", "Indian/Mahe"),
("Indian/Maldives", "Indian/Maldives"),
("Indian/Mauritius", "Indian/Mauritius"),
("Indian/Mayotte", "Indian/Mayotte"),
("Indian/Reunion", "Indian/Reunion"),
("Iran", "Iran"),
("Israel", "Israel"),
("Jamaica", "Jamaica"),
("Japan", "Japan"),
("Kwajalein", "Kwajalein"),
("Libya", "Libya"),
("MET", "MET"),
("MST", "MST"),
("MST7MDT", "MST7MDT"),
("Mexico/BajaNorte", "Mexico/BajaNorte"),
("Mexico/BajaSur", "Mexico/BajaSur"),
("Mexico/General", "Mexico/General"),
("NZ", "NZ"),
("NZ-CHAT", "NZ-CHAT"),
("Navajo", "Navajo"),
("PRC", "PRC"),
("PST8PDT", "PST8PDT"),
("Pacific/Apia", "Pacific/Apia"),
("Pacific/Auckland", "Pacific/Auckland"),
("Pacific/Bougainville", "Pacific/Bougainville"),
("Pacific/Chatham", "Pacific/Chatham"),
("Pacific/Chuuk", "Pacific/Chuuk"),
("Pacific/Easter", "Pacific/Easter"),
("Pacific/Efate", "Pacific/Efate"),
("Pacific/Enderbury", "Pacific/Enderbury"),
("Pacific/Fakaofo", "Pacific/Fakaofo"),
("Pacific/Fiji", "Pacific/Fiji"),
("Pacific/Funafuti", "Pacific/Funafuti"),
("Pacific/Galapagos", "Pacific/Galapagos"),
("Pacific/Gambier", "Pacific/Gambier"),
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
("Pacific/Guam", "Pacific/Guam"),
("Pacific/Honolulu", "Pacific/Honolulu"),
("Pacific/Johnston", "Pacific/Johnston"),
("Pacific/Kanton", "Pacific/Kanton"),
("Pacific/Kiritimati", "Pacific/Kiritimati"),
("Pacific/Kosrae", "Pacific/Kosrae"),
("Pacific/Kwajalein", "Pacific/Kwajalein"),
("Pacific/Majuro", "Pacific/Majuro"),
("Pacific/Marquesas", "Pacific/Marquesas"),
("Pacific/Midway", "Pacific/Midway"),
("Pacific/Nauru", "Pacific/Nauru"),
("Pacific/Niue", "Pacific/Niue"),
("Pacific/Norfolk", "Pacific/Norfolk"),
("Pacific/Noumea", "Pacific/Noumea"),
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
("Pacific/Palau", "Pacific/Palau"),
("Pacific/Pitcairn", "Pacific/Pitcairn"),
("Pacific/Pohnpei", "Pacific/Pohnpei"),
("Pacific/Ponape", "Pacific/Ponape"),
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
("Pacific/Rarotonga", "Pacific/Rarotonga"),
("Pacific/Saipan", "Pacific/Saipan"),
("Pacific/Samoa", "Pacific/Samoa"),
("Pacific/Tahiti", "Pacific/Tahiti"),
("Pacific/Tarawa", "Pacific/Tarawa"),
("Pacific/Tongatapu", "Pacific/Tongatapu"),
("Pacific/Truk", "Pacific/Truk"),
("Pacific/Wake", "Pacific/Wake"),
("Pacific/Wallis", "Pacific/Wallis"),
("Pacific/Yap", "Pacific/Yap"),
("Poland", "Poland"),
("Portugal", "Portugal"),
("ROC", "ROC"),
("ROK", "ROK"),
("Singapore", "Singapore"),
("Turkey", "Turkey"),
("UCT", "UCT"),
("US/Alaska", "US/Alaska"),
("US/Aleutian", "US/Aleutian"),
("US/Arizona", "US/Arizona"),
("US/Central", "US/Central"),
("US/East-Indiana", "US/East-Indiana"),
("US/Eastern", "US/Eastern"),
("US/Hawaii", "US/Hawaii"),
("US/Indiana-Starke", "US/Indiana-Starke"),
("US/Michigan", "US/Michigan"),
("US/Mountain", "US/Mountain"),
("US/Pacific", "US/Pacific"),
("US/Samoa", "US/Samoa"),
("UTC", "UTC"),
("Universal", "Universal"),
("W-SU", "W-SU"),
("WET", "WET"),
("Zulu", "Zulu"),
("localtime", "localtime"),
],
default="UTC",
max_length=255,
),
),
]

View file

@@ -0,0 +1,27 @@
# Generated by Django 3.2.25 on 2024-03-27 19:14
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0199_merge_20240326_1217"),
]
operations = [
migrations.RemoveField(
model_name="addfiletotar",
name="childjob_ptr",
),
migrations.RemoveField(
model_name="addfiletotar",
name="parent_export_job",
),
migrations.DeleteModel(
name="AddBookToUserExportJob",
),
migrations.DeleteModel(
name="AddFileToTar",
),
]

View file

@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0199_status_bookwyrm_st_remote__06aeba_idx"),
]
operations = [
migrations.AddIndex(
model_name="status",
index=models.Index(
fields=["thread_id"], name="bookwyrm_st_thread__cf064f_idx"
),
),
]
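These AddIndex migrations mirror index declarations on the models themselves. As a rough sketch (a simplified stand-in model, with the field and index name taken from the migration above, assuming a configured Django project), the equivalent model-level declaration would be:

from django.db import models

class Status(models.Model):
    # simplified stand-in for the real bookwyrm Status model
    thread_id = models.IntegerField(null=True, blank=True)

    class Meta:
        app_label = "bookwyrm"
        indexes = [
            models.Index(fields=["thread_id"], name="bookwyrm_st_thread__cf064f_idx"),
        ]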

View file

@@ -0,0 +1,39 @@
# Generated by Django 4.2.11 on 2024-04-01 21:09
import bookwyrm.models.fields
from django.db import migrations, models
from django.contrib.postgres.operations import CreateCollation
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0200_alter_user_preferred_timezone"),
]
operations = [
CreateCollation(
"case_insensitive",
provider="icu",
locale="und-u-ks-level2",
deterministic=False,
),
migrations.AlterField(
model_name="hashtag",
name="name",
field=bookwyrm.models.fields.CharField(
db_collation="case_insensitive", max_length=256
),
),
migrations.AlterField(
model_name="user",
name="localname",
field=models.CharField(
db_collation="case_insensitive",
max_length=255,
null=True,
unique=True,
validators=[bookwyrm.models.fields.validate_localname],
),
),
]
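A brief sketch of what the new collation buys (shell/test style, assuming the Hashtag model this migration alters and a configured database): with the non-deterministic ICU collation applied to the column, plain equality lookups match regardless of case, so __iexact workarounds become unnecessary.

from bookwyrm.models import Hashtag

Hashtag.objects.create(name="#BookWyrm")
assert Hashtag.objects.filter(name="#bookwyrm").exists()  # equality is case-insensitive

# Caveat: Postgres rejects LIKE on non-deterministic collations, so pattern
# lookups such as name__contains would need a cast or a deterministic column.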

View file

@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0200_status_bookwyrm_st_thread__cf064f_idx"),
]
operations = [
migrations.AddIndex(
model_name="keypair",
index=models.Index(
fields=["remote_id"], name="bookwyrm_ke_remote__472927_idx"
),
),
]

View file

@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0201_keypair_bookwyrm_ke_remote__472927_idx"),
]
operations = [
migrations.AddIndex(
model_name="user",
index=models.Index(
fields=["username"], name="bookwyrm_us_usernam_b2546d_idx"
),
),
]

View file

@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2024-04-03 19:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0202_user_bookwyrm_us_usernam_b2546d_idx"),
]
operations = [
migrations.AddIndex(
model_name="user",
index=models.Index(
fields=["is_active", "local"], name="bookwyrm_us_is_acti_972dc4_idx"
),
),
]

View file

@@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-09 10:42
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0197_mergedauthor_mergedbook"),
("bookwyrm", "0203_user_bookwyrm_us_is_acti_972dc4_idx"),
]
operations = []

View file

@@ -0,0 +1,13 @@
# Generated by Django 4.2.11 on 2024-04-10 20:22
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0201_alter_hashtag_name_alter_user_localname"),
("bookwyrm", "0204_merge_20240409_1042"),
]
operations = []

View file

@@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-13 02:32
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0200_auto_20240327_1914"),
("bookwyrm", "0204_merge_20240409_1042"),
]
operations = []

View file

@@ -0,0 +1,13 @@
# Generated by Django 4.2.11 on 2024-04-15 15:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0205_merge_20240410_2022"),
("bookwyrm", "0205_merge_20240413_0232"),
]
operations = []

View file

@@ -26,6 +26,8 @@ from .federated_server import FederatedServer
from .group import Group, GroupMember, GroupMemberInvitation
from .import_job import ImportJob, ImportItem
from .bookwyrm_import_job import BookwyrmImportJob
from .bookwyrm_export_job import BookwyrmExportJob
from .move import MoveUser

View file

@@ -152,8 +152,9 @@ class ActivitypubMixin:
# find anyone who's tagged in a status, for example
mentions = self.recipients if hasattr(self, "recipients") else []
# we always send activities to explicitly mentioned users' inboxes
recipients = [u.inbox for u in mentions or [] if not u.local]
# we always send activities to explicitly mentioned users (using shared inboxes
# where available to avoid duplicate submissions to a given instance)
recipients = {u.shared_inbox or u.inbox for u in mentions if not u.local}
# unless it's a dm, all the followers should receive the activity
if privacy != "direct":
@@ -168,23 +169,23 @@ class ActivitypubMixin:
# filter users first by whether they're using the desired software
# this lets us send book updates only to other bw servers
if software:
queryset = queryset.filter(bookwyrm_user=(software == "bookwyrm"))
queryset = queryset.filter(bookwyrm_user=software == "bookwyrm")
# if there's a user, we only want to send to the user's followers
if user:
queryset = queryset.filter(following=user)
# ideally, we will send to shared inboxes for efficiency
shared_inboxes = (
queryset.filter(shared_inbox__isnull=False)
.values_list("shared_inbox", flat=True)
.distinct()
# as above, we prefer shared inboxes if available
recipients.update(
queryset.filter(shared_inbox__isnull=False).values_list(
"shared_inbox", flat=True
)
)
# but not everyone has a shared inbox
inboxes = queryset.filter(shared_inbox__isnull=True).values_list(
"inbox", flat=True
recipients.update(
queryset.filter(shared_inbox__isnull=True).values_list(
"inbox", flat=True
)
)
recipients += list(shared_inboxes) + list(inboxes)
return list(set(recipients))
return list(recipients)
def to_activity_dataclass(self):
"""convert from a model to an activity"""
@@ -205,14 +206,10 @@ class ObjectMixin(ActivitypubMixin):
created: Optional[bool] = None,
software: Any = None,
priority: str = BROADCAST,
broadcast: bool = True,
**kwargs: Any,
) -> None:
"""broadcast created/updated/deleted objects as appropriate"""
broadcast = kwargs.get("broadcast", True)
# this bonus kwarg would cause an error in the base save method
if "broadcast" in kwargs:
del kwargs["broadcast"]
created = created or not bool(self.id)
# first off, we want to save normally no matter what
super().save(*args, **kwargs)
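A minimal standalone sketch of the pattern this refactor adopts: declaring broadcast as an explicit keyword argument consumes it in the mixin's signature, so the base save() (Django's Model.save in the real code) never receives an unexpected keyword and no kwargs-popping is needed.

class Base:
    def save(self, **kwargs):
        assert "broadcast" not in kwargs  # Model.save would raise TypeError on it
        print("saved with", kwargs)

class Mixin(Base):
    def save(self, *args, broadcast: bool = True, **kwargs):
        if broadcast:
            print("would federate this change")
        super().save(**kwargs)

Mixin().save(broadcast=False, update_fields=["title"])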
@@ -602,7 +599,7 @@ def to_ordered_collection_page(
if activity_page.has_next():
next_page = f"{remote_id}?page={activity_page.next_page_number()}"
if activity_page.has_previous():
prev_page = f"{remote_id}?page=%d{activity_page.previous_page_number()}"
prev_page = f"{remote_id}?page={activity_page.previous_page_number()}"
return activitypub.OrderedCollectionPage(
id=f"{remote_id}?page={page}",
partOf=remote_id,
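A quick illustration of the bug fixed above (the URL is hypothetical): %d is not a placeholder inside an f-string, so the leftover printf-style token ended up verbatim in the generated link.

remote_id = "https://example.net/user/mouse/outbox"
page_number = 2
assert f"{remote_id}?page=%d{page_number}" == "https://example.net/user/mouse/outbox?page=%d2"
assert f"{remote_id}?page={page_number}" == "https://example.net/user/mouse/outbox?page=2"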

View file

@@ -1,20 +1,25 @@
""" database schema for info about authors """
import re
from typing import Tuple, Any
from django.contrib.postgres.indexes import GinIndex
import re
from typing import Any
from django.db import models
from django.contrib.postgres.indexes import GinIndex
import pgtrigger
from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import BASE_URL
from bookwyrm.utils.db import format_trigger
from .book import BookDataModel
from .book import BookDataModel, MergedAuthor
from . import fields
class Author(BookDataModel):
"""basic biographic info"""
merged_model = MergedAuthor
wikipedia_link = fields.CharField(
max_length=255, blank=True, null=True, deduplication_field=True
)
@@ -40,12 +45,12 @@ class Author(BookDataModel):
)
bio = fields.HtmlField(null=True, blank=True)
def save(self, *args: Tuple[Any, ...], **kwargs: dict[str, Any]) -> None:
def save(self, *args: Any, **kwargs: Any) -> None:
"""normalize isni format"""
if self.isni:
if self.isni is not None:
self.isni = re.sub(r"\s", "", self.isni)
return super().save(*args, **kwargs)
super().save(*args, **kwargs)
@property
def isni_link(self):
@@ -65,11 +70,48 @@ class Author(BookDataModel):
def get_remote_id(self):
"""editions and works both use "book" instead of model_name"""
return f"https://{DOMAIN}/author/{self.id}"
activity_serializer = activitypub.Author
return f"{BASE_URL}/author/{self.id}"
class Meta:
"""sets up postgres GIN index field"""
"""sets up indexes and triggers"""
# pylint: disable=line-too-long
indexes = (GinIndex(fields=["search_vector"]),)
triggers = [
pgtrigger.Trigger(
name="update_search_vector_on_author_edit",
when=pgtrigger.Before,
operation=pgtrigger.Insert
| pgtrigger.UpdateOf("name", "aliases", "search_vector"),
func=format_trigger(
"""new.search_vector :=
-- author name, with priority A
setweight(to_tsvector('simple', new.name), 'A') ||
-- author aliases, with priority B
setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');
RETURN new;
"""
),
),
pgtrigger.Trigger(
name="reset_book_search_vector_on_author_edit",
when=pgtrigger.After,
operation=pgtrigger.UpdateOf("name", "aliases"),
func=format_trigger(
"""WITH updated_books AS (
SELECT book_id
FROM bookwyrm_book_authors
WHERE author_id = new.id
)
UPDATE bookwyrm_book
SET search_vector = ''
FROM updated_books
WHERE id = updated_books.book_id;
RETURN new;
"""
),
),
]
activity_serializer = activitypub.Author
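The trigger above keeps search_vector populated at write time; for context, a hedged sketch of how such a column is typically queried (the search term and config are illustrative, not taken from this diff):

from django.contrib.postgres.search import SearchQuery, SearchRank
from django.db.models import F
from bookwyrm.models import Author

query = SearchQuery("ursula", config="simple")
results = (
    Author.objects.filter(search_vector=query)  # served by the GIN index
    .annotate(rank=SearchRank(F("search_vector"), query))
    .order_by("-rank")
)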

View file

@@ -10,7 +10,7 @@ from django.http import Http404
from django.utils.translation import gettext_lazy as _
from django.utils.text import slugify
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import BASE_URL
from .fields import RemoteIdField
@@ -38,7 +38,7 @@ class BookWyrmModel(models.Model):
def get_remote_id(self):
"""generate the url that resolves to the local object, without a slug"""
base_path = f"https://{DOMAIN}"
base_path = BASE_URL
if hasattr(self, "user"):
base_path = f"{base_path}{self.user.local_path}"
@@ -53,7 +53,7 @@ class BookWyrmModel(models.Model):
@property
def local_path(self):
"""how to link to this object in the local app, with a slug"""
local = self.get_remote_id().replace(f"https://{DOMAIN}", "")
local = self.get_remote_id().replace(BASE_URL, "")
name = None
if hasattr(self, "name_field"):

View file

@@ -1,29 +1,33 @@
""" database schema for books and shelves """
from itertools import chain
import re
from typing import Any
from typing import Any, Dict, Optional, Iterable
from typing_extensions import Self
from django.contrib.postgres.search import SearchVectorField
from django.contrib.postgres.indexes import GinIndex
from django.core.cache import cache
from django.db import models, transaction
from django.db.models import Prefetch
from django.db.models import Prefetch, ManyToManyField
from django.dispatch import receiver
from django.utils.translation import gettext_lazy as _
from model_utils import FieldTracker
from model_utils.managers import InheritanceManager
from imagekit.models import ImageSpecField
import pgtrigger
from bookwyrm import activitypub
from bookwyrm.isbn.isbn import hyphenator_singleton as hyphenator
from bookwyrm.preview_images import generate_edition_preview_image_task
from bookwyrm.settings import (
DOMAIN,
BASE_URL,
DEFAULT_LANGUAGE,
LANGUAGE_ARTICLES,
ENABLE_PREVIEW_IMAGES,
ENABLE_THUMBNAIL_GENERATION,
)
from bookwyrm.utils.db import format_trigger, add_update_fields
from .activitypub_mixin import OrderedCollectionPageMixin, ObjectMixin
from .base_model import BookWyrmModel
@@ -92,24 +96,134 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
abstract = True
def save(self, *args: Any, **kwargs: Any) -> None:
def save(
self, *args: Any, update_fields: Optional[Iterable[str]] = None, **kwargs: Any
) -> None:
"""ensure that the remote_id is within this instance"""
if self.id:
self.remote_id = self.get_remote_id()
update_fields = add_update_fields(update_fields, "remote_id")
else:
self.origin_id = self.remote_id
self.remote_id = None
return super().save(*args, **kwargs)
update_fields = add_update_fields(update_fields, "origin_id", "remote_id")
super().save(*args, update_fields=update_fields, **kwargs)
# pylint: disable=arguments-differ
def broadcast(self, activity, sender, software="bookwyrm", **kwargs):
"""only send book data updates to other bookwyrm instances"""
super().broadcast(activity, sender, software=software, **kwargs)
def merge_into(self, canonical: Self, dry_run=False) -> Dict[str, Any]:
"""merge this entity into another entity"""
if canonical.id == self.id:
raise ValueError(f"Cannot merge {self} into itself")
absorbed_fields = canonical.absorb_data_from(self, dry_run=dry_run)
if dry_run:
return absorbed_fields
canonical.save()
self.merged_model.objects.create(deleted_id=self.id, merged_into=canonical)
# move related models to canonical
related_models = [
(r.remote_field.name, r.related_model) for r in self._meta.related_objects
]
# pylint: disable=protected-access
for related_field, related_model in related_models:
# Skip the ManyToMany fields that aren't auto-created. These
# should have a corresponding OneToMany field in the model for
# the linking table anyway. If we update it through that model
# instead then we won't lose the extra fields in the linking
# table.
# pylint: disable=protected-access
related_field_obj = related_model._meta.get_field(related_field)
if isinstance(related_field_obj, ManyToManyField):
through = related_field_obj.remote_field.through
if not through._meta.auto_created:
continue
related_objs = related_model.objects.filter(**{related_field: self})
for related_obj in related_objs:
try:
setattr(related_obj, related_field, canonical)
related_obj.save()
except TypeError:
getattr(related_obj, related_field).add(canonical)
getattr(related_obj, related_field).remove(self)
self.delete()
return absorbed_fields
def absorb_data_from(self, other: Self, dry_run=False) -> Dict[str, Any]:
"""fill empty fields with values from another entity"""
absorbed_fields = {}
for data_field in self._meta.get_fields():
if not hasattr(data_field, "activitypub_field"):
continue
canonical_value = getattr(self, data_field.name)
other_value = getattr(other, data_field.name)
if not other_value:
continue
if isinstance(data_field, fields.ArrayField):
if new_values := list(set(other_value) - set(canonical_value)):
# append at the end (in no particular order)
if not dry_run:
setattr(self, data_field.name, canonical_value + new_values)
absorbed_fields[data_field.name] = new_values
elif isinstance(data_field, fields.PartialDateField):
if (
(not canonical_value)
or (other_value.has_day and not canonical_value.has_day)
or (other_value.has_month and not canonical_value.has_month)
):
if not dry_run:
setattr(self, data_field.name, other_value)
absorbed_fields[data_field.name] = other_value
else:
if not canonical_value:
if not dry_run:
setattr(self, data_field.name, other_value)
absorbed_fields[data_field.name] = other_value
return absorbed_fields
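A hedged usage sketch of the merge helpers above (the ids are hypothetical): merge_into() repoints related rows at the canonical record, writes a Merged* tombstone so old URLs can redirect, deletes the duplicate, and returns the fields the canonical record absorbed; dry_run=True previews those fields without writing anything.

from bookwyrm.models import Author

duplicate = Author.objects.get(id=123)  # hypothetical ids
canonical = Author.objects.get(id=42)

preview = duplicate.merge_into(canonical, dry_run=True)  # dict of fields that would be absorbed
absorbed = duplicate.merge_into(canonical)               # performs the merge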
class MergedBookDataModel(models.Model):
"""a BookDataModel instance that has been merged into another instance. kept
to be able to redirect old URLs"""
deleted_id = models.IntegerField(primary_key=True)
class Meta:
"""abstract just like BookDataModel"""
abstract = True
class MergedBook(MergedBookDataModel):
"""an Book that has been merged into another one"""
merged_into = models.ForeignKey(
"Book", on_delete=models.PROTECT, related_name="absorbed"
)
class MergedAuthor(MergedBookDataModel):
"""an Author that has been merged into another one"""
merged_into = models.ForeignKey(
"Author", on_delete=models.PROTECT, related_name="absorbed"
)
class Book(BookDataModel):
"""a generic book, which can mean either an edition or a work"""
merged_model = MergedBook
connector = models.ForeignKey("Connector", on_delete=models.PROTECT, null=True)
# book/work metadata
@@ -190,9 +304,13 @@ class Book(BookDataModel):
"""properties of this edition, as a string"""
items = [
self.physical_format if hasattr(self, "physical_format") else None,
f"{self.languages[0]} language"
if self.languages and self.languages[0] and self.languages[0] != "English"
else None,
(
f"{self.languages[0]} language"
if self.languages
and self.languages[0]
and self.languages[0] != "English"
else None
),
str(self.published_date.year) if self.published_date else None,
", ".join(self.publishers) if hasattr(self, "publishers") else None,
]
@@ -210,11 +328,11 @@ class Book(BookDataModel):
if not isinstance(self, (Edition, Work)):
raise ValueError("Books should be added as Editions or Works")
return super().save(*args, **kwargs)
super().save(*args, **kwargs)
def get_remote_id(self):
"""editions and works both use "book" instead of model_name"""
return f"https://{DOMAIN}/book/{self.id}"
return f"{BASE_URL}/book/{self.id}"
def guess_sort_title(self):
"""Get a best-guess sort title for the current book"""
@@ -232,9 +350,49 @@ class Book(BookDataModel):
)
class Meta:
"""sets up postgres GIN index field"""
"""set up indexes and triggers"""
# pylint: disable=line-too-long
indexes = (GinIndex(fields=["search_vector"]),)
triggers = [
pgtrigger.Trigger(
name="update_search_vector_on_book_edit",
when=pgtrigger.Before,
operation=pgtrigger.Insert
| pgtrigger.UpdateOf("title", "subtitle", "series", "search_vector"),
func=format_trigger(
"""
WITH author_names AS (
SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases
FROM bookwyrm_author
LEFT JOIN bookwyrm_book_authors
ON bookwyrm_author.id = bookwyrm_book_authors.author_id
WHERE bookwyrm_book_authors.book_id = new.id
)
SELECT
-- title, with priority A (parse in English, default to simple if empty)
setweight(COALESCE(nullif(
to_tsvector('english', new.title), ''),
to_tsvector('simple', new.title)), 'A') ||
-- subtitle, with priority B (always in English?)
setweight(to_tsvector('english', COALESCE(new.subtitle, '')), 'B') ||
-- list of authors names and aliases (with priority C)
(SELECT setweight(to_tsvector('simple', COALESCE(array_to_string(ARRAY_AGG(name_and_aliases), ' '), '')), 'C')
FROM author_names
) ||
--- last: series name, with lowest priority
setweight(to_tsvector('english', COALESCE(new.series, '')), 'D')
INTO new.search_vector;
RETURN new;
"""
),
)
]
class Work(OrderedCollectionPageMixin, Book):
@@ -247,10 +405,11 @@ class Work(OrderedCollectionPageMixin, Book):
def save(self, *args, **kwargs):
"""set some fields on the edition object"""
super().save(*args, **kwargs)
# set rank
for edition in self.editions.all():
edition.save()
return super().save(*args, **kwargs)
@property
def default_edition(self):
@@ -356,33 +515,48 @@ class Edition(Book):
# max rank is 9
return rank
def save(self, *args: Any, **kwargs: Any) -> None:
def save(
self, *args: Any, update_fields: Optional[Iterable[str]] = None, **kwargs: Any
) -> None:
"""set some fields on the edition object"""
# calculate isbn 10/13
if self.isbn_13 and self.isbn_13[:3] == "978" and not self.isbn_10:
if (
self.isbn_10 is None
and self.isbn_13 is not None
and self.isbn_13[:3] == "978"
):
self.isbn_10 = isbn_13_to_10(self.isbn_13)
if self.isbn_10 and not self.isbn_13:
update_fields = add_update_fields(update_fields, "isbn_10")
if self.isbn_13 is None and self.isbn_10 is not None:
self.isbn_13 = isbn_10_to_13(self.isbn_10)
update_fields = add_update_fields(update_fields, "isbn_13")
# normalize isbn format
if self.isbn_10:
if self.isbn_10 is not None:
self.isbn_10 = normalize_isbn(self.isbn_10)
if self.isbn_13:
if self.isbn_13 is not None:
self.isbn_13 = normalize_isbn(self.isbn_13)
# set rank
self.edition_rank = self.get_rank()
# clear author cache
if self.id:
for author_id in self.authors.values_list("id", flat=True):
cache.delete(f"author-books-{author_id}")
if (new := self.get_rank()) != self.edition_rank:
self.edition_rank = new
update_fields = add_update_fields(update_fields, "edition_rank")
# Create sort title by removing articles from title
if self.sort_title in [None, ""]:
self.sort_title = self.guess_sort_title()
update_fields = add_update_fields(update_fields, "sort_title")
return super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
# clear author cache
if self.id:
cache.delete_many(
[
f"author-books-{author_id}"
for author_id in self.authors.values_list("id", flat=True)
]
)
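add_update_fields itself is not part of this diff; a minimal sketch of the behaviour its call sites assume: None keeps Django's "save every field" default, while a caller-supplied update_fields collection is extended so the values assigned inside save() are actually written.

from typing import Iterable, Optional

def add_update_fields(update_fields: Optional[Iterable[str]], *fields: str):
    # None means "update everything", so leave it untouched;
    # otherwise make sure the newly assigned fields are included.
    return None if update_fields is None else set(update_fields) | set(fields)

assert add_update_fields(None, "isbn_10") is None
assert add_update_fields(["title"], "isbn_10") == {"title", "isbn_10"}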
@transaction.atomic
def repair(self):

View file

@@ -0,0 +1,341 @@
"""Export user account to tar.gz file for import into another Bookwyrm instance"""
import logging
import os
from boto3.session import Session as BotoSession
from s3_tar import S3Tar
from django.db.models import BooleanField, FileField, JSONField
from django.core.serializers.json import DjangoJSONEncoder
from django.core.files.base import ContentFile
from django.core.files.storage import storages
from bookwyrm import settings
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, ListItem
from bookwyrm.models import Review, Comment, Quotation
from bookwyrm.models import Edition
from bookwyrm.models import UserFollows, User, UserBlocks
from bookwyrm.models.job import ParentJob
from bookwyrm.tasks import app, IMPORTS
from bookwyrm.utils.tar import BookwyrmTarFile
logger = logging.getLogger(__name__)
class BookwyrmAwsSession(BotoSession):
"""a boto session that always uses settings.AWS_S3_ENDPOINT_URL"""
def client(self, *args, **kwargs): # pylint: disable=arguments-differ
kwargs["endpoint_url"] = settings.AWS_S3_ENDPOINT_URL
return super().client("s3", *args, **kwargs)
def select_exports_storage():
"""callable to allow for dependency on runtime configuration"""
return storages["exports"]
class BookwyrmExportJob(ParentJob):
"""entry for a specific request to export a bookwyrm user"""
export_data = FileField(null=True, storage=select_exports_storage)
export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
json_completed = BooleanField(default=False)
def start_job(self):
"""schedule the first task"""
task = create_export_json_task.delay(job_id=self.id)
self.task_id = task.id
self.save(update_fields=["task_id"])
@app.task(queue=IMPORTS)
def create_export_json_task(job_id):
"""create the JSON data for the export"""
job = BookwyrmExportJob.objects.get(id=job_id)
# don't start the job if it was stopped from the UI
if job.complete:
return
try:
job.set_status("active")
# generate JSON structure
job.export_json = export_json(job.user)
job.save(update_fields=["export_json"])
# create archive in separate task
create_archive_task.delay(job_id=job.id)
except Exception as err: # pylint: disable=broad-except
logger.exception(
"create_export_json_task for %s failed with error: %s", job, err
)
job.set_status("failed")
def archive_file_location(file, directory="") -> str:
"""get the relative location of a file inside the archive"""
return os.path.join(directory, file.name)
def add_file_to_s3_tar(s3_tar: S3Tar, storage, file, directory=""):
"""
add file to S3Tar inside directory, keeping any directories under its
storage location
"""
s3_tar.add_file(
os.path.join(storage.location, file.name),
folder=os.path.dirname(archive_file_location(file, directory=directory)),
)
@app.task(queue=IMPORTS)
def create_archive_task(job_id):
"""create the archive containing the JSON file and additional files"""
job = BookwyrmExportJob.objects.get(id=job_id)
# don't start the job if it was stopped from the UI
if job.complete:
return
try:
export_task_id = str(job.task_id)
archive_filename = f"{export_task_id}.tar.gz"
export_json_bytes = DjangoJSONEncoder().encode(job.export_json).encode("utf-8")
user = job.user
editions = get_books_for_user(user)
if settings.USE_S3:
# Storage for writing temporary files
exports_storage = storages["exports"]
# Handle for creating the final archive
s3_tar = S3Tar(
exports_storage.bucket_name,
os.path.join(exports_storage.location, archive_filename),
session=BookwyrmAwsSession(),
)
# Save JSON file to a temporary location
export_json_tmp_file = os.path.join(export_task_id, "archive.json")
exports_storage.save(
export_json_tmp_file,
ContentFile(export_json_bytes),
)
s3_tar.add_file(
os.path.join(exports_storage.location, export_json_tmp_file)
)
# Add images to TAR
images_storage = storages["default"]
if user.avatar:
add_file_to_s3_tar(s3_tar, images_storage, user.avatar)
for edition in editions:
if edition.cover:
add_file_to_s3_tar(
s3_tar, images_storage, edition.cover, directory="images"
)
# Create archive and store file name
s3_tar.tar()
job.export_data = archive_filename
job.save(update_fields=["export_data"])
# Delete temporary files
exports_storage.delete(export_json_tmp_file)
else:
job.export_data = archive_filename
with job.export_data.open("wb") as tar_file:
with BookwyrmTarFile.open(mode="w:gz", fileobj=tar_file) as tar:
# save json file
tar.write_bytes(export_json_bytes)
# Add avatar image if present
if user.avatar:
tar.add_image(user.avatar)
for edition in editions:
if edition.cover:
tar.add_image(edition.cover, directory="images")
job.save(update_fields=["export_data"])
job.set_status("completed")
except Exception as err: # pylint: disable=broad-except
logger.exception("create_archive_task for %s failed with error: %s", job, err)
job.set_status("failed")
def export_json(user: User):
"""create export JSON"""
data = export_user(user) # in the root of the JSON structure
data["settings"] = export_settings(user)
data["goals"] = export_goals(user)
data["books"] = export_books(user)
data["saved_lists"] = export_saved_lists(user)
data["follows"] = export_follows(user)
data["blocks"] = export_blocks(user)
return data
def export_user(user: User):
"""export user data"""
data = user.to_activity()
if user.avatar:
data["icon"]["url"] = archive_file_location(user.avatar)
else:
data["icon"] = {}
return data
def export_settings(user: User):
"""Additional settings - can't be serialized as AP"""
vals = [
"show_goal",
"preferred_timezone",
"default_post_privacy",
"show_suggested_users",
]
return {k: getattr(user, k) for k in vals}
def export_saved_lists(user: User):
"""add user saved lists to export JSON"""
return [l.remote_id for l in user.saved_lists.all()]
def export_follows(user: User):
"""add user follows to export JSON"""
follows = UserFollows.objects.filter(user_subject=user).distinct()
following = User.objects.filter(userfollows_user_object__in=follows).distinct()
return [f.remote_id for f in following]
def export_blocks(user: User):
"""add user blocks to export JSON"""
blocks = UserBlocks.objects.filter(user_subject=user).distinct()
blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
return [b.remote_id for b in blocking]
def export_goals(user: User):
"""add user reading goals to export JSON"""
reading_goals = AnnualGoal.objects.filter(user=user).distinct()
return [
{"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
for goal in reading_goals
]
def export_books(user: User):
"""add books to export JSON"""
editions = get_books_for_user(user)
return [export_book(user, edition) for edition in editions]
def export_book(user: User, edition: Edition):
"""add book to export JSON"""
data = {}
data["work"] = edition.parent_work.to_activity()
data["edition"] = edition.to_activity()
if edition.cover:
data["edition"]["cover"]["url"] = archive_file_location(
edition.cover, directory="images"
)
# authors
data["authors"] = [author.to_activity() for author in edition.authors.all()]
# Shelves this book is on
# Every ShelfItem is this book so we don't bother serializing them
shelf_books = (
ShelfBook.objects.select_related("shelf")
.filter(user=user, book=edition)
.distinct()
)
data["shelves"] = [shelfbook.shelf.to_activity() for shelfbook in shelf_books]
# Lists and ListItems
# ListItems include "notes" and "approved" so we need them
# even though we know it's this book
list_items = ListItem.objects.filter(book=edition, user=user).distinct()
data["lists"] = []
for item in list_items:
list_info = item.book_list.to_activity()
list_info[
"privacy"
] = item.book_list.privacy # this isn't serialized so we add it
list_info["list_item"] = item.to_activity()
data["lists"].append(list_info)
# Statuses
# Can't use select_subclasses here because
# we need to filter on the "book" value,
# which is not available on an ordinary Status
for status in ["comments", "quotations", "reviews"]:
data[status] = []
comments = Comment.objects.filter(user=user, book=edition).all()
for status in comments:
obj = status.to_activity()
obj["progress"] = status.progress
obj["progress_mode"] = status.progress_mode
data["comments"].append(obj)
quotes = Quotation.objects.filter(user=user, book=edition).all()
for status in quotes:
obj = status.to_activity()
obj["position"] = status.position
obj["endposition"] = status.endposition
obj["position_mode"] = status.position_mode
data["quotations"].append(obj)
reviews = Review.objects.filter(user=user, book=edition).all()
data["reviews"] = [status.to_activity() for status in reviews]
# readthroughs can't be serialized to activity
book_readthroughs = (
ReadThrough.objects.filter(user=user, book=edition).distinct().values()
)
data["readthroughs"] = list(book_readthroughs)
return data
def get_books_for_user(user):
"""
Get all the books and editions related to a user.
We use union() instead of Q objects because it creates
multiple simple queries instead of a much more complex DB query
that can time out.
"""
shelf_eds = Edition.objects.select_related("parent_work").filter(shelves__user=user)
rt_eds = Edition.objects.select_related("parent_work").filter(
readthrough__user=user
)
review_eds = Edition.objects.select_related("parent_work").filter(review__user=user)
list_eds = Edition.objects.select_related("parent_work").filter(list__user=user)
comment_eds = Edition.objects.select_related("parent_work").filter(
comment__user=user
)
quote_eds = Edition.objects.select_related("parent_work").filter(
quotation__user=user
)
editions = shelf_eds.union(rt_eds, review_eds, list_eds, comment_eds, quote_eds)
return editions
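For contrast, a sketch of the single complex query the docstring warns about, written with Q objects over the same relations; the union of simple querysets above replaces this shape to avoid timeouts (user and Edition as in the function above).

from django.db.models import Q

editions = (
    Edition.objects.select_related("parent_work")
    .filter(
        Q(shelves__user=user)
        | Q(readthrough__user=user)
        | Q(review__user=user)
        | Q(list__user=user)
        | Q(comment__user=user)
        | Q(quotation__user=user)
    )
    .distinct()
)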

View file

@@ -0,0 +1,462 @@
"""Import a user from another Bookwyrm instance"""
import json
import logging
from django.db.models import FileField, JSONField, CharField
from django.utils import timezone
from django.utils.html import strip_tags
from django.contrib.postgres.fields import ArrayField as DjangoArrayField
from bookwyrm import activitypub
from bookwyrm import models
from bookwyrm.tasks import app, IMPORTS
from bookwyrm.models.job import ParentJob, ParentTask, SubTask
from bookwyrm.utils.tar import BookwyrmTarFile
logger = logging.getLogger(__name__)
class BookwyrmImportJob(ParentJob):
"""entry for a specific request for importing a bookwyrm user backup"""
archive_file = FileField(null=True, blank=True)
import_data = JSONField(null=True)
required = DjangoArrayField(CharField(max_length=50, blank=True), blank=True)
def start_job(self):
"""Start the job"""
start_import_task.delay(job_id=self.id, no_children=True)
@app.task(queue=IMPORTS, base=ParentTask)
def start_import_task(**kwargs):
"""trigger the child import tasks for each user data"""
job = BookwyrmImportJob.objects.get(id=kwargs["job_id"])
archive_file = job.archive_file
# don't start the job if it was stopped from the UI
if job.complete:
return
try:
archive_file.open("rb")
with BookwyrmTarFile.open(mode="r:gz", fileobj=archive_file) as tar:
json_filename = next(
filter(lambda n: n.startswith("archive"), tar.getnames())
)
job.import_data = json.loads(tar.read(json_filename).decode("utf-8"))
if "include_user_profile" in job.required:
update_user_profile(job.user, tar, job.import_data)
if "include_user_settings" in job.required:
update_user_settings(job.user, job.import_data)
if "include_goals" in job.required:
update_goals(job.user, job.import_data.get("goals", []))
if "include_saved_lists" in job.required:
upsert_saved_lists(job.user, job.import_data.get("saved_lists", []))
if "include_follows" in job.required:
upsert_follows(job.user, job.import_data.get("follows", []))
if "include_blocks" in job.required:
upsert_user_blocks(job.user, job.import_data.get("blocks", []))
process_books(job, tar)
job.set_status("complete")
archive_file.close()
except Exception as err: # pylint: disable=broad-except
logger.exception("User Import Job %s Failed with error: %s", job.id, err)
job.set_status("failed")
def process_books(job, tar):
"""
Process user import data related to books
We always import the books even if not assigning
them to shelves, lists etc
"""
books = job.import_data.get("books")
for data in books:
book = get_or_create_edition(data, tar)
if "include_shelves" in job.required:
upsert_shelves(book, job.user, data)
if "include_readthroughs" in job.required:
upsert_readthroughs(data.get("readthroughs"), job.user, book.id)
if "include_comments" in job.required:
upsert_statuses(
job.user, models.Comment, data.get("comments"), book.remote_id
)
if "include_quotations" in job.required:
upsert_statuses(
job.user, models.Quotation, data.get("quotations"), book.remote_id
)
if "include_reviews" in job.required:
upsert_statuses(
job.user, models.Review, data.get("reviews"), book.remote_id
)
if "include_lists" in job.required:
upsert_lists(job.user, data.get("lists"), book.id)
def get_or_create_edition(book_data, tar):
"""Take a JSON string of work and edition data,
find or create the edition and work in the database and
return an edition instance"""
edition = book_data.get("edition")
existing = models.Edition.find_existing(edition)
if existing:
return existing
# make sure we have the authors in the local DB
# replace the old author ids in the edition JSON
edition["authors"] = []
for author in book_data.get("authors"):
parsed_author = activitypub.parse(author)
instance = parsed_author.to_model(
model=models.Author, save=True, overwrite=True
)
edition["authors"].append(instance.remote_id)
# we will add the cover later from the tar
# don't try to load it from the old server
cover = edition.get("cover", {})
cover_path = cover.get("url", None)
edition["cover"] = {}
# first we need the parent work to exist
work = book_data.get("work")
work["editions"] = []
parsed_work = activitypub.parse(work)
work_instance = parsed_work.to_model(model=models.Work, save=True, overwrite=True)
# now we have a work we can add it to the edition
# and create the edition model instance
edition["work"] = work_instance.remote_id
parsed_edition = activitypub.parse(edition)
book = parsed_edition.to_model(model=models.Edition, save=True, overwrite=True)
# set the cover image from the tar
if cover_path:
tar.write_image_to_file(cover_path, book.cover)
return book
def upsert_readthroughs(data, user, book_id):
"""Take a JSON string of readthroughs and
find or create the instances in the database"""
for read_through in data:
obj = {}
keys = [
"progress_mode",
"start_date",
"finish_date",
"stopped_date",
"is_active",
]
for key in keys:
obj[key] = read_through[key]
obj["user_id"] = user.id
obj["book_id"] = book_id
existing = models.ReadThrough.objects.filter(**obj).first()
if not existing:
models.ReadThrough.objects.create(**obj)
def upsert_statuses(user, cls, data, book_remote_id):
"""Take a JSON string of a status and
find or create the instances in the database"""
for status in data:
if is_alias(
user, status["attributedTo"]
): # don't let l33t hax0rs steal other people's posts
# update ids and remove replies
status["attributedTo"] = user.remote_id
status["to"] = update_followers_address(user, status["to"])
status["cc"] = update_followers_address(user, status["cc"])
status[
"replies"
] = (
{}
) # this parses incorrectly but we can't set it without knowing the new id
status["inReplyToBook"] = book_remote_id
parsed = activitypub.parse(status)
if not status_already_exists(
user, parsed
): # don't duplicate posts on multiple import
instance = parsed.to_model(model=cls, save=True, overwrite=True)
for val in [
"progress",
"progress_mode",
"position",
"endposition",
"position_mode",
]:
if status.get(val):
setattr(instance, val, status[val])
instance.remote_id = instance.get_remote_id() # update the remote_id
instance.save() # save and broadcast
else:
logger.warning("User does not have permission to import statuses")
def upsert_lists(user, lists, book_id):
"""Take a list of objects each containing
a list and list item as AP objects
Because we are creating new IDs we can't assume the id
will exist or be accurate, so we only use to_model for
adding new items after checking whether they exist.
"""
book = models.Edition.objects.get(id=book_id)
for blist in lists:
booklist = models.List.objects.filter(name=blist["name"], user=user).first()
if not booklist:
blist["owner"] = user.remote_id
parsed = activitypub.parse(blist)
booklist = parsed.to_model(model=models.List, save=True, overwrite=True)
booklist.privacy = blist["privacy"]
booklist.save()
item = models.ListItem.objects.filter(book=book, book_list=booklist).exists()
if not item:
count = booklist.books.count()
models.ListItem.objects.create(
book=book,
book_list=booklist,
user=user,
notes=blist["list_item"]["notes"],
approved=blist["list_item"]["approved"],
order=count + 1,
)
def upsert_shelves(book, user, book_data):
"""Take shelf JSON objects and create
DB entries if they don't already exist"""
shelves = book_data["shelves"]
for shelf in shelves:
book_shelf = models.Shelf.objects.filter(name=shelf["name"], user=user).first()
if not book_shelf:
book_shelf = models.Shelf.objects.create(name=shelf["name"], user=user)
# add the book as a ShelfBook if needed
if not models.ShelfBook.objects.filter(
book=book, shelf=book_shelf, user=user
).exists():
models.ShelfBook.objects.create(
book=book, shelf=book_shelf, user=user, shelved_date=timezone.now()
)
def update_user_profile(user, tar, data):
"""update the user's profile from import data"""
name = data.get("name", None)
username = data.get("preferredUsername")
user.name = name if name else username
user.summary = strip_tags(data.get("summary", None))
user.save(update_fields=["name", "summary"])
if data["icon"].get("url"):
avatar_filename = next(filter(lambda n: n.startswith("avatar"), tar.getnames()))
tar.write_image_to_file(avatar_filename, user.avatar)
def update_user_settings(user, data):
"""update the user's settings from import data"""
update_fields = ["manually_approves_followers", "hide_follows", "discoverable"]
ap_fields = [
("manuallyApprovesFollowers", "manually_approves_followers"),
("hideFollows", "hide_follows"),
("discoverable", "discoverable"),
]
for (ap_field, bw_field) in ap_fields:
setattr(user, bw_field, data[ap_field])
bw_fields = [
"show_goal",
"show_suggested_users",
"default_post_privacy",
"preferred_timezone",
]
for field in bw_fields:
update_fields.append(field)
setattr(user, field, data["settings"][field])
user.save(update_fields=update_fields)
@app.task(queue=IMPORTS, base=SubTask)
def update_user_settings_task(job_id):
"""wrapper task for user's settings import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return update_user_settings(parent_job.user, parent_job.import_data.get("user"))
def update_goals(user, data):
"""update the user's goals from import data"""
for goal in data:
# edit the existing goal if there is one
existing = models.AnnualGoal.objects.filter(
year=goal["year"], user=user
).first()
if existing:
for k in goal.keys():
setattr(existing, k, goal[k])
existing.save()
else:
goal["user"] = user
models.AnnualGoal.objects.create(**goal)
@app.task(queue=IMPORTS, base=SubTask)
def update_goals_task(job_id):
"""wrapper task for user's goals import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return update_goals(parent_job.user, parent_job.import_data.get("goals"))
def upsert_saved_lists(user, values):
"""Take a list of remote ids and add as saved lists"""
for remote_id in values:
book_list = activitypub.resolve_remote_id(remote_id, models.List)
if book_list:
user.saved_lists.add(book_list)
@app.task(queue=IMPORTS, base=SubTask)
def upsert_saved_lists_task(job_id):
"""wrapper task for user's saved lists import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return upsert_saved_lists(
parent_job.user, parent_job.import_data.get("saved_lists")
)
def upsert_follows(user, values):
"""Take a list of remote ids and add as follows"""
for remote_id in values:
followee = activitypub.resolve_remote_id(remote_id, models.User)
if followee:
(follow_request, created,) = models.UserFollowRequest.objects.get_or_create(
user_subject=user,
user_object=followee,
)
if not created:
# this request probably failed to connect with the remote
# and should save to trigger a re-broadcast
follow_request.save()
@app.task(queue=IMPORTS, base=SubTask)
def upsert_follows_task(job_id):
"""wrapper task for user's follows import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return upsert_follows(parent_job.user, parent_job.import_data.get("follows"))
def upsert_user_blocks(user, user_ids):
"""block users"""
for user_id in user_ids:
user_object = activitypub.resolve_remote_id(user_id, models.User)
if user_object:
exists = models.UserBlocks.objects.filter(
user_subject=user, user_object=user_object
).exists()
if not exists:
models.UserBlocks.objects.create(
user_subject=user, user_object=user_object
)
# remove the blocked user's lists from the groups
models.List.remove_from_group(user, user_object)
# remove the blocked user from all blocker's owned groups
models.GroupMember.remove(user, user_object)
@app.task(queue=IMPORTS, base=SubTask)
def upsert_user_blocks_task(job_id):
"""wrapper task for user's blocks import"""
parent_job = BookwyrmImportJob.objects.get(id=job_id)
return upsert_user_blocks(
parent_job.user, parent_job.import_data.get("blocked_users")
)
def update_followers_address(user, field):
"""statuses to or cc followers need to have the followers
address updated to the new local user"""
for i, audience in enumerate(field):
if audience.rsplit("/")[-1] == "followers":
field[i] = user.followers_url
return field
def is_alias(user, remote_id):
"""check that the user is listed as movedTo or also_known_as
in the remote user's profile"""
remote_user = activitypub.resolve_remote_id(
remote_id=remote_id, model=models.User, save=False
)
if remote_user:
if remote_user.moved_to:
return user.remote_id == remote_user.moved_to
if remote_user.also_known_as:
return user in remote_user.also_known_as.all()
return False
def status_already_exists(user, status):
"""check whether this status has already been published
by this user. We can't rely on to_model() because it
only matches on remote_id, which we have to change
*after* saving because it needs the primary key (id)"""
return models.Status.objects.filter(
user=user, content=status.content, published_date=status.published
).exists()
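A quick illustration of update_followers_address() above — it only swaps audience entries whose last path segment is "followers" for the importing user's own followers URL. The addresses here are invented for the example:

# status exported from the old account, addressed to its old followers collection
to = ["https://old.example/user/mouse/followers"]
# assuming user.followers_url == "https://new.example/user/mouse/followers"
update_followers_address(user, to)
# -> ["https://new.example/user/mouse/followers"]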

View file

@ -11,7 +11,7 @@ ConnectorFiles = models.TextChoices("ConnectorFiles", CONNECTORS)
class Connector(BookWyrmModel):
"""book data source connectors"""
identifier = models.CharField(max_length=255, unique=True)
identifier = models.CharField(max_length=255, unique=True) # domain
priority = models.IntegerField(default=2)
name = models.CharField(max_length=255, null=True, blank=True)
connector_file = models.CharField(max_length=255, choices=ConnectorFiles.choices)

View file

@ -16,7 +16,7 @@ FederationStatus = [
class FederatedServer(BookWyrmModel):
"""store which servers we federate with"""
server_name = models.CharField(max_length=255, unique=True)
server_name = models.CharField(max_length=255, unique=True) # domain
status = models.CharField(
max_length=255, default="federated", choices=FederationStatus
)
@ -64,5 +64,4 @@ class FederatedServer(BookWyrmModel):
def is_blocked(cls, url: str) -> bool:
"""look up if a domain is blocked"""
url = urlparse(url)
domain = url.netloc
return cls.objects.filter(server_name=domain, status="blocked").exists()
return cls.objects.filter(server_name=url.hostname, status="blocked").exists()

View file

@ -260,12 +260,12 @@ class PrivacyField(ActivitypubFieldMixin, models.CharField):
if to == [self.public]:
setattr(instance, self.name, "public")
elif self.public in cc:
setattr(instance, self.name, "unlisted")
elif to == [user.followers_url]:
setattr(instance, self.name, "followers")
elif cc == []:
setattr(instance, self.name, "direct")
elif self.public in cc:
setattr(instance, self.name, "unlisted")
else:
setattr(instance, self.name, "followers")
return original == getattr(instance, self.name)
@ -482,7 +482,7 @@ class ImageField(ActivitypubFieldMixin, models.ImageField):
if not url:
return None
return activitypub.Document(url=url, name=alt)
return activitypub.Image(url=url, name=alt)
def field_from_activity(self, value, allow_external_connections=True):
image_slug = value

View file

@ -1,7 +1,7 @@
""" do book related things with other users """
from django.db import models, IntegrityError, transaction
from django.db.models import Q
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import BASE_URL
from .base_model import BookWyrmModel
from . import fields
from .relationship import UserBlocks
@ -17,7 +17,7 @@ class Group(BookWyrmModel):
def get_remote_id(self):
"""don't want the user to be in there in this case"""
return f"https://{DOMAIN}/group/{self.id}"
return f"{BASE_URL}/group/{self.id}"
@classmethod
def followers_filter(cls, queryset, viewer):

View file

@ -2,18 +2,19 @@
from bookwyrm import activitypub
from .activitypub_mixin import ActivitypubMixin
from .base_model import BookWyrmModel
from .fields import CICharField
from .fields import CharField
class Hashtag(ActivitypubMixin, BookWyrmModel):
"a hashtag which can be used in statuses"
name = CICharField(
name = CharField(
max_length=256,
blank=False,
null=False,
activitypub_field="name",
deduplication_field=True,
db_collation="case_insensitive",
)
name_field = "name"

307
bookwyrm/models/job.py Normal file
View file

@ -0,0 +1,307 @@
"""Everything needed for Celery to multi-thread complex tasks."""
from django.db import models
from django.db import transaction
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from bookwyrm.models.user import User
from bookwyrm.tasks import app
class Job(models.Model):
"""Abstract model to store the state of a Task."""
class Status(models.TextChoices):
"""Possible job states."""
PENDING = "pending", _("Pending")
ACTIVE = "active", _("Active")
COMPLETE = "complete", _("Complete")
STOPPED = "stopped", _("Stopped")
FAILED = "failed", _("Failed")
task_id = models.UUIDField(unique=True, null=True, blank=True)
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(default=timezone.now)
complete = models.BooleanField(default=False)
status = models.CharField(
max_length=50, choices=Status.choices, default=Status.PENDING, null=True
)
class Meta:
"""Make it abstract"""
abstract = True
def complete_job(self):
"""Report that the job has completed"""
if self.complete:
return
self.status = self.Status.COMPLETE
self.complete = True
self.updated_date = timezone.now()
self.save(update_fields=["status", "complete", "updated_date"])
def stop_job(self, reason=None):
"""Stop the job"""
if self.complete:
return
self.__terminate_job()
if reason and reason == "failed":
self.status = self.Status.FAILED
else:
self.status = self.Status.STOPPED
self.complete = True
self.updated_date = timezone.now()
self.save(update_fields=["status", "complete", "updated_date"])
def set_status(self, status):
"""Set job status"""
if self.complete:
return
if self.status == status:
return
if status == self.Status.COMPLETE:
self.complete_job()
return
if status == self.Status.STOPPED:
self.stop_job()
return
if status == self.Status.FAILED:
self.stop_job(reason="failed")
return
self.updated_date = timezone.now()
self.status = status
self.save(update_fields=["status", "updated_date"])
def __terminate_job(self):
"""Tell workers to ignore and not execute this task."""
app.control.revoke(self.task_id, terminate=True)
class ParentJob(Job):
"""Store the state of a Task which can spawn many :model:`ChildJob`s to spread
resource load.
Intended to be sub-classed if necessary via proxy or
multi-table inheritance.
Extends :model:`Job`.
"""
user = models.ForeignKey(User, on_delete=models.CASCADE)
def complete_job(self):
"""Report that the job has completed and stop pending
children. Extend.
"""
super().complete_job()
self.__terminate_pending_child_jobs()
def notify_child_job_complete(self):
"""let the job know when the items get work done"""
if self.complete:
return
self.updated_date = timezone.now()
self.save(update_fields=["updated_date"])
if not self.complete and self.has_completed:
self.complete_job()
def __terminate_job(self): # pylint: disable=unused-private-member
"""Tell workers to ignore and not execute this task
& pending child tasks. Extend.
"""
super().__terminate_job()
self.__terminate_pending_child_jobs()
def __terminate_pending_child_jobs(self):
"""Tell workers to ignore and not execute any pending child tasks."""
tasks = self.pending_child_jobs.filter(task_id__isnull=False).values_list(
"task_id", flat=True
)
app.control.revoke(list(tasks))
self.pending_child_jobs.update(status=self.Status.STOPPED)
@property
def has_completed(self):
"""has this job finished"""
return not self.pending_child_jobs.exists()
@property
def pending_child_jobs(self):
"""items that haven't been processed yet"""
return self.child_jobs.filter(complete=False)
class ChildJob(Job):
"""Stores the state of a Task for the related :model:`ParentJob`.
Intended to be sub-classed if necessary via proxy or
multi-table inheritance.
Extends :model:`Job`.
"""
parent_job = models.ForeignKey(
ParentJob, on_delete=models.CASCADE, related_name="child_jobs"
)
def set_status(self, status):
"""Set job and parent_job status. Extend."""
super().set_status(status)
if (
status == self.Status.ACTIVE
and self.parent_job.status == self.Status.PENDING
):
self.parent_job.set_status(self.Status.ACTIVE)
def complete_job(self):
"""Report to parent_job that the job has completed. Extend."""
super().complete_job()
self.parent_job.notify_child_job_complete()
class ParentTask(app.Task):
"""Used with ParentJob, Abstract Tasks execute code at specific points in
a Task's lifecycle, applying to all Tasks with the same 'base'.
All status & ParentJob.task_id assignment is managed here for you.
Usage e.g. @app.task(base=ParentTask)
"""
def before_start(
self, task_id, args, kwargs
): # pylint: disable=no-self-use, unused-argument
"""Handler called before the task starts. Override.
Prepare ParentJob before the task starts.
Arguments:
task_id (str): Unique id of the task to execute.
args (Tuple): Original arguments for the task to execute.
kwargs (Dict): Original keyword arguments for the task to execute.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
no_children (bool): If 'True' this is the only Task expected to run
for the given ParentJob.
Returns:
None: The return value of this handler is ignored.
"""
job = ParentJob.objects.get(id=kwargs["job_id"])
job.task_id = task_id
job.save(update_fields=["task_id"])
if kwargs["no_children"]:
job.set_status(ChildJob.Status.ACTIVE)
def on_success(
self, retval, task_id, args, kwargs
): # pylint: disable=no-self-use, unused-argument
"""Run by the worker if the task executes successfully. Override.
Update ParentJob on Task complete.
Arguments:
retval (Any): The return value of the task.
task_id (str): Unique id of the executed task.
args (Tuple): Original arguments for the executed task.
kwargs (Dict): Original keyword arguments for the executed task.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
no_children (bool): If 'True' this is the only Task expected to run
for the given ParentJob.
Returns:
None: The return value of this handler is ignored.
"""
if kwargs["no_children"]:
job = ParentJob.objects.get(id=kwargs["job_id"])
job.complete_job()
class SubTask(app.Task):
"""Used with ChildJob, Abstract Tasks execute code at specific points in
a Task's lifecycle, applying to all Tasks with the same 'base'.
All status & ChildJob.task_id assignment is managed here for you.
Usage e.g. @app.task(base=SubTask)
"""
def before_start(
self, task_id, *args, **kwargs
): # pylint: disable=no-self-use, unused-argument
"""Handler called before the task starts. Override.
Prepare ChildJob before the task starts.
Arguments:
task_id (str): Unique id of the task to execute.
args (Tuple): Original arguments for the task to execute.
kwargs (Dict): Original keyword arguments for the task to execute.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
child_id (int): Unique 'id' of the ChildJob.
Returns:
None: The return value of this handler is ignored.
"""
child_job = ChildJob.objects.get(id=kwargs["child_id"])
child_job.task_id = task_id
child_job.save(update_fields=["task_id"])
child_job.set_status(ChildJob.Status.ACTIVE)
def on_success(
self, retval, task_id, *args, **kwargs
): # pylint: disable=no-self-use, unused-argument
"""Run by the worker if the task executes successfully. Override.
Notify ChildJob of task completion.
Arguments:
retval (Any): The return value of the task.
task_id (str): Unique id of the executed task.
args (Tuple): Original arguments for the executed task.
kwargs (Dict): Original keyword arguments for the executed task.
Keyword Arguments:
job_id (int): Unique 'id' of the ParentJob.
child_id (int): Unique 'id' of the ChildJob.
Returns:
None: The return value of this handler is ignored.
"""
subtask = ChildJob.objects.get(id=kwargs["child_id"])
subtask.complete_job()
@transaction.atomic
def create_child_job(parent_job, task_callback):
"""Utility method for creating a ChildJob
and running a task to avoid DB race conditions
"""
child_job = ChildJob.objects.create(parent_job=parent_job)
transaction.on_commit(
lambda: task_callback.delay(job_id=parent_job.id, child_id=child_job.id)
)
return child_job
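The docstrings above spell out the keyword arguments each base class expects; here is a minimal sketch of how ParentTask, SubTask and create_child_job might be wired together. The task names, the loop, and the routing to the IMPORTS queue are illustrative assumptions, not part of this file:

@app.task(queue=IMPORTS, base=ParentTask)
def example_parent_task(**kwargs):
    # ParentTask.before_start() has already stored task_id on the ParentJob
    job = ParentJob.objects.get(id=kwargs["job_id"])
    for _ in range(3):
        # creates a ChildJob and schedules the child task after the transaction commits
        create_child_job(job, example_child_task)

@app.task(queue=IMPORTS, base=SubTask)
def example_child_task(**kwargs):
    # SubTask.before_start() marked the ChildJob active; on_success() will
    # call complete_job(), which notifies the parent via notify_child_job_complete()
    child_job = ChildJob.objects.get(id=kwargs["child_id"])
    ...  # one unit of work

# kicking things off for an existing ParentJob:
# example_parent_task.delay(job_id=parent_job.id, no_children=False)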

View file

@ -1,4 +1,5 @@
""" outlink data """
from typing import Optional, Iterable
from urllib.parse import urlparse
from django.core.exceptions import PermissionDenied
@ -6,6 +7,7 @@ from django.db import models
from django.utils.translation import gettext_lazy as _
from bookwyrm import activitypub
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import ActivitypubMixin
from .base_model import BookWyrmModel
from . import fields
@ -34,17 +36,19 @@ class Link(ActivitypubMixin, BookWyrmModel):
"""link name via the associated domain"""
return self.domain.name
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""create a link"""
# get or create the associated domain
if not self.domain:
domain = urlparse(self.url).netloc
domain = urlparse(self.url).hostname
self.domain, _ = LinkDomain.objects.get_or_create(domain=domain)
update_fields = add_update_fields(update_fields, "domain")
# this is never broadcast, the owning model broadcasts an update
if "broadcast" in kwargs:
del kwargs["broadcast"]
return super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
AvailabilityChoices = [
@ -88,8 +92,10 @@ class LinkDomain(BookWyrmModel):
return
raise PermissionDenied()
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""set a default name"""
if not self.name:
self.name = self.domain
super().save(*args, **kwargs)
update_fields = add_update_fields(update_fields, "name")
super().save(*args, update_fields=update_fields, **kwargs)
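add_update_fields() comes from bookwyrm.utils.db and is not shown in this diff; a plausible sketch of what it does, offered purely as an assumption so the save() overrides above are easier to read:

from typing import Iterable, Optional

def add_update_fields(update_fields: Optional[Iterable[str]], *fields: str):
    """Merge extra field names into an update_fields value.

    None means Django will save every column anyway, so it passes through
    unchanged; otherwise the names of the freshly populated fields are added
    so they are included in the UPDATE."""
    return set(fields).union(update_fields) if update_fields is not None else None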

View file

@ -1,4 +1,5 @@
""" make a list of books!! """
from typing import Optional, Iterable
import uuid
from django.core.exceptions import PermissionDenied
@ -7,7 +8,8 @@ from django.db.models import Q
from django.utils import timezone
from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import BASE_URL
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin
from .base_model import BookWyrmModel
@ -50,7 +52,7 @@ class List(OrderedCollectionMixin, BookWyrmModel):
def get_remote_id(self):
"""don't want the user to be in there in this case"""
return f"https://{DOMAIN}/list/{self.id}"
return f"{BASE_URL}/list/{self.id}"
@property
def collection_queryset(self):
@ -124,11 +126,13 @@ class List(OrderedCollectionMixin, BookWyrmModel):
group=None, curation="closed"
)
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""on save, update embed_key and avoid clash with existing code"""
if not self.embed_key:
self.embed_key = uuid.uuid4()
super().save(*args, **kwargs)
update_fields = add_update_fields(update_fields, "embed_key")
super().save(*args, update_fields=update_fields, **kwargs)
class ListItem(CollectionItemMixin, BookWyrmModel):

View file

@ -10,7 +10,7 @@ from .notification import Notification, NotificationType
class Move(ActivityMixin, BookWyrmModel):
"""migrating an activitypub user account"""
"""migrating an activitypub object"""
user = fields.ForeignKey(
"User", on_delete=models.PROTECT, activitypub_field="actor"
@ -48,24 +48,21 @@ class MoveUser(Move):
"""update user info and broadcast it"""
# only allow if the source is listed in the target's alsoKnownAs
if self.user in self.target.also_known_as.all():
self.user.also_known_as.add(self.target.id)
self.user.update_active_date()
self.user.moved_to = self.target.remote_id
self.user.save(update_fields=["moved_to"])
if self.user.local:
kwargs[
"broadcast"
] = True # Only broadcast if we are initiating the Move
super().save(*args, **kwargs)
for follower in self.user.followers.all():
if follower.local:
Notification.notify(
follower, self.user, notification_type=NotificationType.MOVE
)
else:
if self.user not in self.target.also_known_as.all():
raise PermissionDenied()
self.user.also_known_as.add(self.target.id)
self.user.update_active_date()
self.user.moved_to = self.target.remote_id
self.user.save(update_fields=["moved_to"])
if self.user.local:
kwargs["broadcast"] = True # Only broadcast if we are initiating the Move
super().save(*args, **kwargs)
for follower in self.user.followers.all():
if follower.local:
Notification.notify(
follower, self.user, notification_type=NotificationType.MOVE
)

View file

@ -1,8 +1,16 @@
""" alert a user to activity """
from django.db import models, transaction
from django.dispatch import receiver
from bookwyrm.models.bookwyrm_export_job import BookwyrmExportJob
from .base_model import BookWyrmModel
from . import Boost, Favorite, GroupMemberInvitation, ImportJob, LinkDomain
from . import (
Boost,
Favorite,
GroupMemberInvitation,
ImportJob,
BookwyrmImportJob,
LinkDomain,
)
from . import ListItem, Report, Status, User, UserFollowRequest
from .site import InviteRequest
@ -23,6 +31,8 @@ class NotificationType(models.TextChoices):
# Imports
IMPORT = "IMPORT"
USER_IMPORT = "USER_IMPORT"
USER_EXPORT = "USER_EXPORT"
# List activity
ADD = "ADD"
@ -63,6 +73,9 @@ class Notification(BookWyrmModel):
)
related_status = models.ForeignKey("Status", on_delete=models.CASCADE, null=True)
related_import = models.ForeignKey("ImportJob", on_delete=models.CASCADE, null=True)
related_user_export = models.ForeignKey(
"BookwyrmExportJob", on_delete=models.CASCADE, null=True
)
related_list_items = models.ManyToManyField(
"ListItem", symmetrical=False, related_name="notifications"
)
@ -226,6 +239,36 @@ def notify_user_on_import_complete(
)
@receiver(models.signals.post_save, sender=BookwyrmImportJob)
# pylint: disable=unused-argument
def notify_user_on_user_import_complete(
sender, instance, *args, update_fields=None, **kwargs
):
"""we imported your user details! aren't you proud of us"""
update_fields = update_fields or []
if not instance.complete or "complete" not in update_fields:
return
Notification.objects.create(
user=instance.user, notification_type=NotificationType.USER_IMPORT
)
@receiver(models.signals.post_save, sender=BookwyrmExportJob)
# pylint: disable=unused-argument
def notify_user_on_user_export_complete(
sender, instance, *args, update_fields=None, **kwargs
):
"""we exported your user details! aren't you proud of us"""
update_fields = update_fields or []
if not instance.complete or "complete" not in update_fields:
return
Notification.objects.create(
user=instance.user,
notification_type=NotificationType.USER_EXPORT,
related_user_export=instance,
)
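Both import/export receivers return early unless "complete" appears in update_fields, so they only fire on the save performed when the job is marked finished. A hedged sketch of that trigger, assuming BookwyrmExportJob builds on the Job/ParentJob classes added in job.py:

# somewhere in the export task, once everything is done:
export_job.complete_job()
# complete_job() saves with update_fields=["status", "complete", "updated_date"],
# so "complete" is present and notify_user_on_user_export_complete() runs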
@receiver(models.signals.post_save, sender=Report)
@transaction.atomic
# pylint: disable=unused-argument

View file

@ -1,9 +1,13 @@
""" progress in a book """
from typing import Optional, Iterable
from django.core import validators
from django.core.cache import cache
from django.db import models
from django.db.models import F, Q
from bookwyrm.utils.db import add_update_fields
from .base_model import BookWyrmModel
@ -30,14 +34,17 @@ class ReadThrough(BookWyrmModel):
stopped_date = models.DateTimeField(blank=True, null=True)
is_active = models.BooleanField(default=True)
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""update user active time"""
cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}")
self.user.update_active_date()
# an active readthrough must have an unset finish date
if self.finish_date or self.stopped_date:
self.is_active = False
super().save(*args, **kwargs)
update_fields = add_update_fields(update_fields, "is_active")
super().save(*args, update_fields=update_fields, **kwargs)
cache.delete(f"latest_read_through-{self.user_id}-{self.book_id}")
self.user.update_active_date()
def create_update(self):
"""add update to the readthrough"""

View file

@ -38,14 +38,16 @@ class UserRelationship(BookWyrmModel):
def save(self, *args, **kwargs):
"""clear the template cache"""
clear_cache(self.user_subject, self.user_object)
super().save(*args, **kwargs)
clear_cache(self.user_subject, self.user_object)
def delete(self, *args, **kwargs):
"""clear the template cache"""
clear_cache(self.user_subject, self.user_object)
super().delete(*args, **kwargs)
clear_cache(self.user_subject, self.user_object)
class Meta:
"""relationships should be unique"""
@ -65,6 +67,13 @@ class UserRelationship(BookWyrmModel):
base_path = self.user_subject.remote_id
return f"{base_path}#follows/{self.id}"
def get_accept_reject_id(self, status):
"""get id for sending an accept or reject of a local user"""
base_path = self.user_object.remote_id
status_id = self.id or 0
return f"{base_path}#{status}/{status_id}"
class UserFollows(ActivityMixin, UserRelationship):
"""Following a user"""
@ -105,6 +114,20 @@ class UserFollows(ActivityMixin, UserRelationship):
)
return obj
def reject(self):
"""generate a Reject for this follow. This would normally happen
when a user deletes a follow they previously accepted"""
if self.user_object.local:
activity = activitypub.Reject(
id=self.get_accept_reject_id(status="rejects"),
actor=self.user_object.remote_id,
object=self.to_activity(),
).serialize()
self.broadcast(activity, self.user_object)
self.delete()
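get_accept_reject_id() (moved up onto UserRelationship above) just appends a fragment to the target user's remote_id; an illustrative value, with the domain and primary key invented:

# follow.user_object.remote_id == "https://example.net/user/rat", follow.id == 7
follow.get_accept_reject_id(status="rejects")
# -> "https://example.net/user/rat#rejects/7"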
class UserFollowRequest(ActivitypubMixin, UserRelationship):
"""following a user requires manual or automatic confirmation"""
@ -148,13 +171,6 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship):
if not manually_approves:
self.accept()
def get_accept_reject_id(self, status):
"""get id for sending an accept or reject of a local user"""
base_path = self.user_object.remote_id
status_id = self.id or 0
return f"{base_path}#{status}/{status_id}"
def accept(self, broadcast_only=False):
"""turn this request into the real deal"""
user = self.user_object

View file

@ -3,7 +3,7 @@ from django.core.exceptions import PermissionDenied
from django.db import models
from django.utils.translation import gettext_lazy as _
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import BASE_URL
from .base_model import BookWyrmModel
@ -46,7 +46,7 @@ class Report(BookWyrmModel):
raise PermissionDenied()
def get_remote_id(self):
return f"https://{DOMAIN}/settings/reports/{self.id}"
return f"{BASE_URL}/settings/reports/{self.id}"
def comment(self, user, note):
"""comment on a report"""

View file

@ -1,13 +1,15 @@
""" puttin' books on shelves """
import re
from typing import Optional, Iterable
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.db import models
from django.utils import timezone
from bookwyrm import activitypub
from bookwyrm.settings import DOMAIN
from bookwyrm.settings import BASE_URL
from bookwyrm.tasks import BROADCAST
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import CollectionItemMixin, OrderedCollectionMixin
from .base_model import BookWyrmModel
from . import fields
@ -44,8 +46,9 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel):
"""set the identifier"""
super().save(*args, priority=priority, **kwargs)
if not self.identifier:
# this needs the auto increment ID from the save() above
self.identifier = self.get_identifier()
super().save(*args, **kwargs, broadcast=False)
super().save(*args, **kwargs, broadcast=False, update_fields={"identifier"})
def get_identifier(self):
"""custom-shelf-123 for the url"""
@ -71,7 +74,7 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel):
@property
def local_path(self):
"""No slugs"""
return self.get_remote_id().replace(f"https://{DOMAIN}", "")
return self.get_remote_id().replace(BASE_URL, "")
def raise_not_deletable(self, viewer):
"""don't let anyone delete a default shelf"""
@ -100,10 +103,21 @@ class ShelfBook(CollectionItemMixin, BookWyrmModel):
activity_serializer = activitypub.ShelfItem
collection_field = "shelf"
def save(self, *args, priority=BROADCAST, **kwargs):
def save(
self,
*args,
priority=BROADCAST,
update_fields: Optional[Iterable[str]] = None,
**kwargs,
):
if not self.user:
self.user = self.shelf.user
if self.id and self.user.local:
update_fields = add_update_fields(update_fields, "user")
is_update = self.id is not None
super().save(*args, priority=priority, update_fields=update_fields, **kwargs)
if is_update and self.user.local:
# remove all caches related to all editions of this book
cache.delete_many(
[
@ -111,7 +125,6 @@ class ShelfBook(CollectionItemMixin, BookWyrmModel):
for book in self.book.parent_work.editions.all()
]
)
super().save(*args, priority=priority, **kwargs)
def delete(self, *args, **kwargs):
if self.id and self.user.local:

View file

@ -1,5 +1,6 @@
""" the particulars for this instance of BookWyrm """
import datetime
from typing import Optional, Iterable
from urllib.parse import urljoin
import uuid
@ -10,8 +11,12 @@ from django.dispatch import receiver
from django.utils import timezone
from model_utils import FieldTracker
from bookwyrm.connectors.abstract_connector import get_data
from bookwyrm.preview_images import generate_site_preview_image_task
from bookwyrm.settings import DOMAIN, ENABLE_PREVIEW_IMAGES, STATIC_FULL_URL
from bookwyrm.settings import BASE_URL, ENABLE_PREVIEW_IMAGES, STATIC_FULL_URL
from bookwyrm.settings import RELEASE_API
from bookwyrm.tasks import app, MISC
from bookwyrm.utils.db import add_update_fields
from .base_model import BookWyrmModel, new_access_code
from .user import User
from .fields import get_absolute_url
@ -45,7 +50,7 @@ class SiteSettings(SiteModel):
default_theme = models.ForeignKey(
"Theme", null=True, blank=True, on_delete=models.SET_NULL
)
version = models.CharField(null=True, blank=True, max_length=10)
available_version = models.CharField(null=True, blank=True, max_length=10)
# admin setup options
install_mode = models.BooleanField(default=False)
@ -96,6 +101,8 @@ class SiteSettings(SiteModel):
imports_enabled = models.BooleanField(default=True)
import_size_limit = models.IntegerField(default=0)
import_limit_reset = models.IntegerField(default=0)
user_exports_enabled = models.BooleanField(default=False)
user_import_time_limit = models.IntegerField(default=48)
field_tracker = FieldTracker(fields=["name", "instance_tagline", "logo"])
@ -131,16 +138,19 @@ class SiteSettings(SiteModel):
return get_absolute_url(uploaded)
return urljoin(STATIC_FULL_URL, default_path)
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""if require_confirm_email is disabled, make sure no users are pending,
if enabled, make sure invite_question_text is not empty"""
if not self.invite_question_text:
self.invite_question_text = "What is your favourite book?"
update_fields = add_update_fields(update_fields, "invite_question_text")
super().save(*args, update_fields=update_fields, **kwargs)
if not self.require_confirm_email:
User.objects.filter(is_active=False, deactivation_reason="pending").update(
is_active=True, deactivation_reason=None
)
if not self.invite_question_text:
self.invite_question_text = "What is your favourite book?"
super().save(*args, **kwargs)
class Theme(SiteModel):
@ -183,7 +193,7 @@ class SiteInvite(models.Model):
@property
def link(self):
"""formats the invite link"""
return f"https://{DOMAIN}/invite/{self.code}"
return f"{BASE_URL}/invite/{self.code}"
class InviteRequest(BookWyrmModel):
@ -230,7 +240,7 @@ class PasswordReset(models.Model):
@property
def link(self):
"""formats the invite link"""
return f"https://{DOMAIN}/password-reset/{self.code}"
return f"{BASE_URL}/password-reset/{self.code}"
# pylint: disable=unused-argument
@ -243,3 +253,14 @@ def preview_image(instance, *args, **kwargs):
if len(changed_fields) > 0:
generate_site_preview_image_task.delay()
@app.task(queue=MISC)
def check_for_updates_task():
"""See if git remote knows about a new version"""
site = SiteSettings.objects.get()
release = get_data(RELEASE_API, timeout=3)
available_version = release.get("tag_name", None)
if available_version:
site.available_version = available_version
site.save(update_fields=["available_version"])

View file

@ -1,6 +1,6 @@
""" models for storing different kinds of Activities """
from dataclasses import MISSING
from typing import Optional
from typing import Optional, Iterable
import re
from django.apps import apps
@ -12,12 +12,15 @@ from django.db.models import Q
from django.dispatch import receiver
from django.template.loader import get_template
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
from model_utils import FieldTracker
from model_utils.managers import InheritanceManager
from bookwyrm import activitypub
from bookwyrm.preview_images import generate_edition_preview_image_task
from bookwyrm.settings import ENABLE_PREVIEW_IMAGES
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import ActivitypubMixin, ActivityMixin
from .activitypub_mixin import OrderedCollectionPageMixin
from .base_model import BookWyrmModel
@ -78,13 +81,18 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
"""default sorting"""
ordering = ("-published_date",)
indexes = [
models.Index(fields=["remote_id"]),
models.Index(fields=["thread_id"]),
]
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""save and notify"""
if self.reply_parent:
if self.thread_id is None and self.reply_parent:
self.thread_id = self.reply_parent.thread_id or self.reply_parent_id
update_fields = add_update_fields(update_fields, "thread_id")
super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
if not self.reply_parent:
self.thread_id = self.id
@ -107,14 +115,14 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
@property
def recipients(self):
"""tagged users who definitely need to get this status in broadcast"""
mentions = [u for u in self.mention_users.all() if not u.local]
mentions = {u for u in self.mention_users.all() if not u.local}
if (
hasattr(self, "reply_parent")
and self.reply_parent
and not self.reply_parent.user.local
):
mentions.append(self.reply_parent.user)
return list(set(mentions))
mentions.add(self.reply_parent.user)
return list(mentions)
@classmethod
def ignore_activity(
@ -178,6 +186,24 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
"""you can't boost dms"""
return self.privacy in ["unlisted", "public"]
@property
def page_title(self):
"""title of the page when only this status is shown"""
return _("%(display_name)s's status") % {"display_name": self.user.display_name}
@property
def page_description(self):
"""description of the page in meta tags when only this status is shown"""
return None
@property
def page_image(self):
"""image to use as preview in meta tags when only this status is shown"""
if self.mention_books.exists():
book = self.mention_books.first()
return book.preview_image or book.cover
return self.user.preview_image
def to_replies(self, **kwargs):
"""helper function for loading AP serialized replies to a status"""
return self.to_ordered_collection(
@ -301,6 +327,10 @@ class BookStatus(Status):
abstract = True
@property
def page_image(self):
return self.book.preview_image or self.book.cover or super().page_image
class Comment(BookStatus):
"""like a review but without a rating and transient"""
@ -332,17 +362,26 @@ class Comment(BookStatus):
activity_serializer = activitypub.Comment
@property
def page_title(self):
return _("%(display_name)s's comment on %(book_title)s") % {
"display_name": self.user.display_name,
"book_title": self.book.title,
}
class Quotation(BookStatus):
"""like a review but without a rating and transient"""
quote = fields.HtmlField()
raw_quote = models.TextField(blank=True, null=True)
position = models.IntegerField(
validators=[MinValueValidator(0)], null=True, blank=True
position = models.TextField(
null=True,
blank=True,
)
endposition = models.IntegerField(
validators=[MinValueValidator(0)], null=True, blank=True
endposition = models.TextField(
null=True,
blank=True,
)
position_mode = models.CharField(
max_length=3,
@ -355,10 +394,10 @@ class Quotation(BookStatus):
def _format_position(self) -> Optional[str]:
"""serialize page position"""
beg = self.position
end = self.endposition or 0
end = self.endposition
if self.position_mode != "PG" or not beg:
return None
return f"pp. {beg}-{end}" if end > beg else f"p. {beg}"
return f"pp. {beg}-{end}" if end else f"p. {beg}"
@property
def pure_content(self):
@ -374,6 +413,13 @@ class Quotation(BookStatus):
activity_serializer = activitypub.Quotation
@property
def page_title(self):
return _("%(display_name)s's quote from %(book_title)s") % {
"display_name": self.user.display_name,
"book_title": self.book.title,
}
class Review(BookStatus):
"""a book review"""
@ -403,14 +449,22 @@ class Review(BookStatus):
"""indicate the book in question for mastodon (or w/e) users"""
return self.content
@property
def page_title(self):
return _("%(display_name)s's review of %(book_title)s") % {
"display_name": self.user.display_name,
"book_title": self.book.title,
}
activity_serializer = activitypub.Review
pure_type = "Article"
def save(self, *args, **kwargs):
"""clear rating caches"""
super().save(*args, **kwargs)
if self.book.parent_work:
cache.delete(f"book-rating-{self.book.parent_work.id}")
super().save(*args, **kwargs)
class ReviewRating(Review):
@ -426,6 +480,18 @@ class ReviewRating(Review):
template = get_template("snippets/generated_status/rating.html")
return template.render({"book": self.book, "rating": self.rating}).strip()
@property
def page_description(self):
return ngettext_lazy(
"%(display_name)s rated %(book_title)s: %(display_rating).1f star",
"%(display_name)s rated %(book_title)s: %(display_rating).1f stars",
"display_rating",
) % {
"display_name": self.user.display_name,
"book_title": self.book.title,
"display_rating": self.rating,
}
activity_serializer = activitypub.Rating
pure_type = "Note"
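With position and endposition now free-text fields, _format_position() only needs a non-empty endposition to render a page range. A couple of examples on an unsaved Quotation, with invented values:

quote = Quotation(position="12", endposition="15", position_mode="PG")
quote._format_position()  # -> "pp. 12-15"
quote.endposition = None
quote._format_position()  # -> "p. 12"
quote.position_mode = "PCT"  # any non-"PG" mode
quote._format_position()  # -> None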

View file

@ -1,28 +1,31 @@
""" database schema for user data """
import datetime
import re
import zoneinfo
from typing import Optional, Iterable
from urllib.parse import urlparse
from uuid import uuid4
from django.apps import apps
from django.contrib.auth.models import AbstractUser
from django.contrib.postgres.fields import ArrayField, CICharField
from django.contrib.postgres.fields import ArrayField as DjangoArrayField
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.dispatch import receiver
from django.db import models, transaction, IntegrityError
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from model_utils import FieldTracker
import pytz
from bookwyrm import activitypub
from bookwyrm.connectors import get_data, ConnectorException
from bookwyrm.models.shelf import Shelf
from bookwyrm.models.status import Status
from bookwyrm.preview_images import generate_user_preview_image_task
from bookwyrm.settings import DOMAIN, ENABLE_PREVIEW_IMAGES, USE_HTTPS, LANGUAGES
from bookwyrm.settings import BASE_URL, ENABLE_PREVIEW_IMAGES, LANGUAGES
from bookwyrm.signatures import create_key_pair
from bookwyrm.tasks import app, MISC
from bookwyrm.utils import regex
from bookwyrm.utils.db import add_update_fields
from .activitypub_mixin import OrderedCollectionPageMixin, ActivitypubMixin
from .base_model import BookWyrmModel, DeactivationReason, new_access_code
from .federated_server import FederatedServer
@ -42,12 +45,6 @@ def get_feed_filter_choices():
return [f[0] for f in FeedFilterChoices]
def site_link():
"""helper for generating links to the site"""
protocol = "https" if USE_HTTPS else "http"
return f"{protocol}://{DOMAIN}"
# pylint: disable=too-many-public-methods
class User(OrderedCollectionPageMixin, AbstractUser):
"""a user who wants to read books"""
@ -81,11 +78,12 @@ class User(OrderedCollectionPageMixin, AbstractUser):
summary = fields.HtmlField(null=True, blank=True)
local = models.BooleanField(default=False)
bookwyrm_user = fields.BooleanField(default=True)
localname = CICharField(
localname = models.CharField(
max_length=255,
null=True,
unique=True,
validators=[fields.validate_localname],
db_collation="case_insensitive",
)
# name is your display name, which you can change at will
name = fields.CharField(max_length=100, null=True, blank=True)
@ -162,7 +160,7 @@ class User(OrderedCollectionPageMixin, AbstractUser):
show_guided_tour = models.BooleanField(default=True)
# feed options
feed_status_types = ArrayField(
feed_status_types = DjangoArrayField(
models.CharField(max_length=10, blank=False, choices=FeedFilterChoices),
size=8,
default=get_feed_filter_choices,
@ -171,8 +169,8 @@ class User(OrderedCollectionPageMixin, AbstractUser):
summary_keys = models.JSONField(null=True)
preferred_timezone = models.CharField(
choices=[(str(tz), str(tz)) for tz in pytz.all_timezones],
default=str(pytz.utc),
choices=[(str(tz), str(tz)) for tz in sorted(zoneinfo.available_timezones())],
default=str(datetime.timezone.utc),
max_length=255,
)
preferred_language = models.CharField(
@ -198,6 +196,14 @@ class User(OrderedCollectionPageMixin, AbstractUser):
hotp_secret = models.CharField(max_length=32, default=None, blank=True, null=True)
hotp_count = models.IntegerField(default=0, blank=True, null=True)
class Meta(AbstractUser.Meta):
"""indexes"""
indexes = [
models.Index(fields=["username"]),
models.Index(fields=["is_active", "local"]),
]
@property
def active_follower_requests(self):
"""Follow requests from active users"""
@ -206,8 +212,7 @@ class User(OrderedCollectionPageMixin, AbstractUser):
@property
def confirmation_link(self):
"""helper for generating confirmation links"""
link = site_link()
return f"{link}/confirm-email/{self.confirmation_code}"
return f"{BASE_URL}/confirm-email/{self.confirmation_code}"
@property
def following_link(self):
@ -326,6 +331,7 @@ class User(OrderedCollectionPageMixin, AbstractUser):
"https://w3id.org/security/v1",
{
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"Hashtag": "as:Hashtag",
"schema": "http://schema.org#",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
@ -335,13 +341,14 @@ class User(OrderedCollectionPageMixin, AbstractUser):
]
return activity_object
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""populate fields for new local users"""
created = not bool(self.id)
if not self.local and not re.match(regex.FULL_USERNAME, self.username):
# generate a username that uses the domain (webfinger format)
actor_parts = urlparse(self.remote_id)
self.username = f"{self.username}@{actor_parts.netloc}"
self.username = f"{self.username}@{actor_parts.hostname}"
update_fields = add_update_fields(update_fields, "username")
# this user already exists, no need to populate fields
if not created:
@ -350,26 +357,34 @@ class User(OrderedCollectionPageMixin, AbstractUser):
elif not self.deactivation_date:
self.deactivation_date = timezone.now()
super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
return
# this is a new remote user, we need to set their remote server field
if not self.local:
super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
transaction.on_commit(lambda: set_remote_server(self.id))
return
with transaction.atomic():
# populate fields for local users
link = site_link()
self.remote_id = f"{link}/user/{self.localname}"
self.remote_id = f"{BASE_URL}/user/{self.localname}"
self.followers_url = f"{self.remote_id}/followers"
self.inbox = f"{self.remote_id}/inbox"
self.shared_inbox = f"{link}/inbox"
self.shared_inbox = f"{BASE_URL}/inbox"
self.outbox = f"{self.remote_id}/outbox"
update_fields = add_update_fields(
update_fields,
"remote_id",
"followers_url",
"inbox",
"shared_inbox",
"outbox",
)
# an id needs to be set before we can proceed with related models
super().save(*args, **kwargs)
super().save(*args, update_fields=update_fields, **kwargs)
# make users editors by default
try:
@ -509,18 +524,44 @@ class KeyPair(ActivitypubMixin, BookWyrmModel):
activity_serializer = activitypub.PublicKey
serialize_reverse_fields = [("owner", "owner", "id")]
class Meta:
"""indexes"""
indexes = [
models.Index(fields=["remote_id"]),
]
def get_remote_id(self):
# self.owner is set by the OneToOneField on User
return f"{self.owner.remote_id}/#main-key"
def save(self, *args, **kwargs):
def save(self, *args, update_fields: Optional[Iterable[str]] = None, **kwargs):
"""create a key pair"""
# no broadcasting happening here
if "broadcast" in kwargs:
del kwargs["broadcast"]
if not self.public_key:
self.private_key, self.public_key = create_key_pair()
return super().save(*args, **kwargs)
update_fields = add_update_fields(
update_fields, "private_key", "public_key"
)
super().save(*args, update_fields=update_fields, **kwargs)
@app.task(queue=MISC)
def erase_user_data(user_id):
"""Erase any custom data about this user asynchronously
This is for deleted historical user data that pre-dates data
being cleared automatically"""
user = User.objects.get(id=user_id)
user.erase_user_data()
user.save(
broadcast=False,
update_fields=["email", "avatar", "preview_image", "summary", "name"],
)
user.erase_user_statuses(broadcast=False)
@app.task(queue=MISC)
@ -529,7 +570,7 @@ def set_remote_server(user_id, allow_external_connections=False):
user = User.objects.get(id=user_id)
actor_parts = urlparse(user.remote_id)
federated_server = get_or_create_remote_server(
actor_parts.netloc, allow_external_connections=allow_external_connections
actor_parts.hostname, allow_external_connections=allow_external_connections
)
# if we were unable to find the server, we need to create a new entry for it
if not federated_server:
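For remote users the webfinger-style username is now built from the actor URL's hostname rather than its netloc, so an explicit port (and any uppercase) is dropped. A tiny illustration with an invented actor URL:

from urllib.parse import urlparse

actor = "https://Books.Example.net:8000/user/rat"
urlparse(actor).netloc    # -> "Books.Example.net:8000"
urlparse(actor).hostname  # -> "books.example.net"
# so User.save() stores the username as "rat@books.example.net"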

View file

@ -1,4 +1,5 @@
""" Generate social media preview images for twitter/mastodon/etc """
import math
import os
import textwrap
@ -42,8 +43,8 @@ def get_imagefont(name, size):
return ImageFont.truetype(path, size)
except KeyError:
logger.error("Font %s not found in config", name)
except OSError:
logger.error("Could not load font %s from file", name)
except OSError as err:
logger.error("Could not load font %s from file: %s", name, err)
return ImageFont.load_default()
@ -59,7 +60,7 @@ def get_font(weight, size=28):
font.set_variation_by_name("Bold")
if weight == "regular":
font.set_variation_by_name("Regular")
except AttributeError:
except OSError:
pass
return font
@ -174,11 +175,13 @@ def generate_instance_layer(content_width):
site = models.SiteSettings.objects.get()
if site.logo_small:
logo_img = Image.open(site.logo_small)
with Image.open(site.logo_small) as logo_img:
logo_img.load()
else:
try:
static_path = os.path.join(settings.STATIC_ROOT, "images/logo-small.png")
logo_img = Image.open(static_path)
with Image.open(static_path) as logo_img:
logo_img.load()
except FileNotFoundError:
logo_img = None
@ -210,18 +213,9 @@ def generate_instance_layer(content_width):
def generate_rating_layer(rating, content_width):
"""Places components for rating preview"""
try:
icon_star_full = Image.open(
os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png")
)
icon_star_empty = Image.open(
os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png")
)
icon_star_half = Image.open(
os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png")
)
except FileNotFoundError:
return None
path_star_full = os.path.join(settings.STATIC_ROOT, "images/icons/star-full.png")
path_star_empty = os.path.join(settings.STATIC_ROOT, "images/icons/star-empty.png")
path_star_half = os.path.join(settings.STATIC_ROOT, "images/icons/star-half.png")
icon_size = 64
icon_margin = 10
@ -236,17 +230,23 @@ def generate_rating_layer(rating, content_width):
position_x = 0
for _ in range(math.floor(rating)):
rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0))
position_x = position_x + icon_size + icon_margin
try:
with Image.open(path_star_full) as icon_star_full:
for _ in range(math.floor(rating)):
rating_layer_mask.alpha_composite(icon_star_full, (position_x, 0))
position_x = position_x + icon_size + icon_margin
if math.floor(rating) != math.ceil(rating):
rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0))
position_x = position_x + icon_size + icon_margin
if math.floor(rating) != math.ceil(rating):
with Image.open(path_star_half) as icon_star_half:
rating_layer_mask.alpha_composite(icon_star_half, (position_x, 0))
position_x = position_x + icon_size + icon_margin
for _ in range(5 - math.ceil(rating)):
rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0))
position_x = position_x + icon_size + icon_margin
with Image.open(path_star_empty) as icon_star_empty:
for _ in range(5 - math.ceil(rating)):
rating_layer_mask.alpha_composite(icon_star_empty, (position_x, 0))
position_x = position_x + icon_size + icon_margin
except FileNotFoundError:
return None
rating_layer_mask = rating_layer_mask.getchannel("A")
rating_layer_mask = ImageOps.invert(rating_layer_mask)
@ -289,7 +289,8 @@ def generate_preview_image(
texts = texts or {}
# Cover
try:
inner_img_layer = Image.open(picture)
with Image.open(picture) as inner_img_layer:
inner_img_layer.load()
inner_img_layer.thumbnail(
(inner_img_width, inner_img_height), Image.Resampling.LANCZOS
)

View file

@ -19,7 +19,6 @@ DOMAIN = env("DOMAIN")
with open("VERSION", encoding="utf-8") as f:
version = f.read()
version = version.replace("\n", "")
f.close()
VERSION = version
@ -30,8 +29,11 @@ RELEASE_API = env(
PAGE_LENGTH = env.int("PAGE_LENGTH", 15)
DEFAULT_LANGUAGE = env("DEFAULT_LANGUAGE", "English")
# TODO: extend maximum age to 1 year once termination of active sessions
# is implemented (see bookwyrm-social#2278, bookwyrm-social#3082).
SESSION_COOKIE_AGE = env.int("SESSION_COOKIE_AGE", 3600 * 24 * 30) # 1 month
JS_CACHE = "ac315a3b"
JS_CACHE = "8a89cad7"
# email
EMAIL_BACKEND = env("EMAIL_BACKEND", "django.core.mail.backends.smtp.EmailBackend")
@ -99,11 +101,14 @@ INSTALLED_APPS = [
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.humanize",
"oauth2_provider",
"file_resubmit",
"sass_processor",
"bookwyrm",
"celery",
"django_celery_beat",
"imagekit",
"pgtrigger",
"storages",
]
@ -119,6 +124,7 @@ MIDDLEWARE = [
"bookwyrm.middleware.IPBlocklistMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"bookwyrm.middleware.FileTooBig",
]
ROOT_URLCONF = "bookwyrm.urls"
@ -242,17 +248,22 @@ if env.bool("USE_DUMMY_CACHE", False):
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache",
}
},
"file_resubmit": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache",
"LOCATION": "/tmp/file_resubmit_tests/",
},
}
else:
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": REDIS_ACTIVITY_URL,
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
},
}
},
"file_resubmit": {
"BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
"LOCATION": "/tmp/file_resubmit/",
},
}
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
@ -308,6 +319,7 @@ LANGUAGES = [
("eu-es", _("Euskara (Basque)")),
("gl-es", _("Galego (Galician)")),
("it-it", _("Italiano (Italian)")),
("ko-kr", _("한국어 (Korean)")),
("fi-fi", _("Suomi (Finnish)")),
("fr-fr", _("Français (French)")),
("lt-lt", _("Lietuvių (Lithuanian)")),
@ -318,6 +330,7 @@ LANGUAGES = [
("pt-pt", _("Português Europeu (European Portuguese)")),
("ro-ro", _("Română (Romanian)")),
("sv-se", _("Svenska (Swedish)")),
("uk-ua", _("Українська (Ukrainian)")),
("zh-hans", _("简体中文 (Simplified Chinese)")),
("zh-hant", _("繁體中文 (Traditional Chinese)")),
]
@ -331,35 +344,37 @@ TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
agent = requests.utils.default_user_agent()
USER_AGENT = f"{agent} (BookWyrm/{VERSION}; +https://{DOMAIN}/)"
# Imagekit generated thumbnails
ENABLE_THUMBNAIL_GENERATION = env.bool("ENABLE_THUMBNAIL_GENERATION", False)
IMAGEKIT_CACHEFILE_DIR = "thumbnails"
IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY = "bookwyrm.thumbnail_generation.Strategy"
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
CSP_ADDITIONAL_HOSTS = env.list("CSP_ADDITIONAL_HOSTS", [])
# Storage
PROTOCOL = "http"
if USE_HTTPS:
PROTOCOL = "https"
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
PORT = env.int("PORT", 443 if USE_HTTPS else 80)
if (USE_HTTPS and PORT == 443) or (not USE_HTTPS and PORT == 80):
NETLOC = DOMAIN
else:
NETLOC = f"{DOMAIN}:{PORT}"
BASE_URL = f"{PROTOCOL}://{NETLOC}"
CSRF_TRUSTED_ORIGINS = [BASE_URL]
USER_AGENT = f"BookWyrm (BookWyrm/{VERSION}; +{BASE_URL})"
# Storage
USE_S3 = env.bool("USE_S3", False)
USE_AZURE = env.bool("USE_AZURE", False)
S3_SIGNED_URL_EXPIRY = env.int("S3_SIGNED_URL_EXPIRY", 900)
if USE_S3:
# AWS settings
@ -371,44 +386,117 @@ if USE_S3:
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", None)
AWS_DEFAULT_ACL = "public-read"
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", f"{PROTOCOL}:")
# Storages
STORAGES = {
"default": {
"BACKEND": "storages.backends.s3.S3Storage",
"OPTIONS": {
"location": "images",
"default_acl": "public-read",
"file_overwrite": False,
},
},
"staticfiles": {
"BACKEND": "storages.backends.s3.S3Storage",
"OPTIONS": {
"location": "static",
"default_acl": "public-read",
},
},
"exports": {
"BACKEND": "storages.backends.s3.S3Storage",
"OPTIONS": {
"location": "images",
"default_acl": None,
"file_overwrite": False,
},
},
}
# S3 Static settings
STATIC_LOCATION = "static"
STATIC_URL = f"{PROTOCOL}://{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/"
STATICFILES_STORAGE = "bookwyrm.storage_backends.StaticStorage"
STATIC_URL = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/"
STATIC_FULL_URL = STATIC_URL
# S3 Media settings
MEDIA_LOCATION = "images"
MEDIA_URL = f"{PROTOCOL}://{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/"
MEDIA_URL = f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/"
MEDIA_FULL_URL = MEDIA_URL
STATIC_FULL_URL = STATIC_URL
DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.ImagesStorage"
CSP_DEFAULT_SRC = ["'self'", AWS_S3_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
CSP_SCRIPT_SRC = ["'self'", AWS_S3_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
# Content Security Policy
CSP_DEFAULT_SRC = [
"'self'",
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
if AWS_S3_CUSTOM_DOMAIN
else None,
] + CSP_ADDITIONAL_HOSTS
CSP_SCRIPT_SRC = [
"'self'",
f"{AWS_S3_URL_PROTOCOL}//{AWS_S3_CUSTOM_DOMAIN}"
if AWS_S3_CUSTOM_DOMAIN
else None,
] + CSP_ADDITIONAL_HOSTS
elif USE_AZURE:
# Azure settings
AZURE_ACCOUNT_NAME = env("AZURE_ACCOUNT_NAME")
AZURE_ACCOUNT_KEY = env("AZURE_ACCOUNT_KEY")
AZURE_CONTAINER = env("AZURE_CONTAINER")
AZURE_CUSTOM_DOMAIN = env("AZURE_CUSTOM_DOMAIN")
# Storages
STORAGES = {
"default": {
"BACKEND": "storages.backends.azure_storage.AzureStorage",
"OPTIONS": {
"location": "images",
"overwrite_files": False,
},
},
"staticfiles": {
"BACKEND": "storages.backends.azure_storage.AzureStorage",
"OPTIONS": {
"location": "static",
},
},
"exports": {
"BACKEND": None, # not implemented yet
},
}
# Azure Static settings
STATIC_LOCATION = "static"
STATIC_URL = (
f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{STATIC_LOCATION}/"
)
STATICFILES_STORAGE = "bookwyrm.storage_backends.AzureStaticStorage"
STATIC_FULL_URL = STATIC_URL
# Azure Media settings
MEDIA_LOCATION = "images"
MEDIA_URL = (
f"{PROTOCOL}://{AZURE_CUSTOM_DOMAIN}/{AZURE_CONTAINER}/{MEDIA_LOCATION}/"
)
MEDIA_FULL_URL = MEDIA_URL
STATIC_FULL_URL = STATIC_URL
DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.AzureImagesStorage"
# Content Security Policy
CSP_DEFAULT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
CSP_SCRIPT_SRC = ["'self'", AZURE_CUSTOM_DOMAIN] + CSP_ADDITIONAL_HOSTS
else:
# Storages
STORAGES = {
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
},
"exports": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
"OPTIONS": {
"location": "exports",
},
},
}
# Static settings
STATIC_URL = "/static/"
STATIC_FULL_URL = BASE_URL + STATIC_URL
# Media settings
MEDIA_URL = "/images/"
MEDIA_FULL_URL = f"{PROTOCOL}://{DOMAIN}{MEDIA_URL}"
STATIC_FULL_URL = f"{PROTOCOL}://{DOMAIN}{STATIC_URL}"
MEDIA_FULL_URL = BASE_URL + MEDIA_URL
# Content Security Policy
CSP_DEFAULT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS
CSP_SCRIPT_SRC = ["'self'"] + CSP_ADDITIONAL_HOSTS
@ -431,3 +519,7 @@ if HTTP_X_FORWARDED_PROTO:
# Do not change this setting unless you already have an existing
# user with the same username - in which case you should change it!
INSTANCE_ACTOR_USERNAME = "bookwyrm.instance.actor"
# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env
# (note the difference in variable names).
DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) << 20
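Two of the settings above are easy to sanity-check by hand; the domain and port values below are illustrative, not BookWyrm defaults:

# NETLOC / BASE_URL derivation, e.g. DOMAIN="books.example", USE_HTTPS=True:
#   PORT defaults to 443, so NETLOC == "books.example"
#   and BASE_URL == "https://books.example"
# with USE_HTTPS=False and PORT=8000:
#   NETLOC == "books.example:8000" and BASE_URL == "http://books.example:8000"

# DATA_UPLOAD_MAX_MEMORY_MiB is shifted left 20 bits to get bytes:
assert (100 << 20) == 100 * 1024 * 1024 == 104_857_600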

View file

@ -111,6 +111,10 @@ const tries = {
},
},
f: {
b: {
2: "FB2",
3: "FB3",
},
l: {
a: {
c: "FLAC",

View file

@ -1,63 +0,0 @@
"""Handles backends for storages"""
import os
from tempfile import SpooledTemporaryFile
from storages.backends.s3boto3 import S3Boto3Storage
from storages.backends.azure_storage import AzureStorage
class StaticStorage(S3Boto3Storage): # pylint: disable=abstract-method
"""Storage class for Static contents"""
location = "static"
default_acl = "public-read"
class ImagesStorage(S3Boto3Storage): # pylint: disable=abstract-method
"""Storage class for Image files"""
location = "images"
default_acl = "public-read"
file_overwrite = False
"""
This is our custom version of S3Boto3Storage that fixes a bug in
boto3 where the passed in file is closed upon upload.
From:
https://github.com/matthewwithanm/django-imagekit/issues/391#issuecomment-275367006
https://github.com/boto/boto3/issues/929
https://github.com/matthewwithanm/django-imagekit/issues/391
"""
def _save(self, name, content):
"""
We create a clone of the content file as when this is passed to
boto3 it wrongly closes the file upon upload where as the storage
backend expects it to still be open
"""
# Seek our content back to the start
content.seek(0, os.SEEK_SET)
# Create a temporary file that will write to disk after a specified
# size. This file will be automatically deleted when closed by
# boto3 or after exiting the `with` statement if the boto3 is fixed
with SpooledTemporaryFile() as content_autoclose:
# Write our original content into our copy that will be closed by boto3
content_autoclose.write(content.read())
# Upload the object which will auto close the
# content_autoclose instance
return super()._save(name, content_autoclose)
class AzureStaticStorage(AzureStorage): # pylint: disable=abstract-method
"""Storage class for Static contents"""
location = "static"
class AzureImagesStorage(AzureStorage): # pylint: disable=abstract-method
"""Storage class for Image files"""
location = "images"
overwrite_files = False

View file

@ -0,0 +1,16 @@
{% extends 'layout.html' %}
{% load i18n %}
{% block title %}{% trans "File too large" %}{% endblock %}
{% block content %}
<div class="block">
<h1 class="title">{% trans "File too large" %}</h1>
<p class="content">{% trans "The file you are uploading is too large." %}</p>
<p class="content">
{% blocktrans trimmed %}
You can try using a smaller file, or ask your BookWyrm server administrator to increase the <code>DATA_UPLOAD_MAX_MEMORY_SIZE</code> setting.
{% endblocktrans %}
</p>
</div>
{% endblock %}

View file

@ -31,10 +31,10 @@
</p>
</div>
<div class="columns">
<div class="columns is-multiline">
{% if superlatives.top_rated %}
{% with book=superlatives.top_rated.default_edition rating=superlatives.top_rated.rating %}
<div class="column is-one-third is-flex">
<div class="column is-half-tablet is-one-third-desktop is-flex-grow-1 is-flex">
<div class="media notification is-clipped">
<div class="media-left">
<a href="{{ book.local_path }}">
@ -53,7 +53,7 @@
{% if superlatives.wanted %}
{% with book=superlatives.wanted.default_edition %}
<div class="column is-one-third is-flex">
<div class="column is-half-tablet is-one-third-desktop is-flex-grow-1 is-flex">
<div class="media notification is-clipped">
<div class="media-left">
<a href="{{ book.local_path }}">
@ -72,7 +72,7 @@
{% if superlatives.controversial %}
{% with book=superlatives.controversial.default_edition %}
<div class="column is-one-third is-flex">
<div class="column is-half-tablet is-one-third-desktop is-flex-grow-1 is-flex">
<div class="media notification is-clipped">
<div class="media-left">
<a href="{{ book.local_path }}">

View file

@ -55,6 +55,8 @@
<p class="field"><label class="label" for="id_wikipedia_link">{% trans "Wikipedia link:" %}</label> {{ form.wikipedia_link }}</p>
<p class="field"><label class="label" for="id_wikidata">{% trans "Wikidata:" %}</label> {{ form.wikidata }}</p>
{% include 'snippets/form_errors.html' with errors_list=form.wikipedia_link.errors id="desc_wikipedia_link" %}
<p class="field"><label class="label" for="id_website">{% trans "Website:" %}</label> {{ form.website }}</p>

View file

@ -9,7 +9,8 @@
{% block title %}{{ book|book_title }}{% endblock %}
{% block opengraph %}
{% include 'snippets/opengraph.html' with title=book.title description=book|book_description image=book.preview_image %}
{% firstof book.preview_image book.cover as book_image %}
{% include 'snippets/opengraph.html' with title=book.title description=book|book_description image=book_image %}
{% endblock %}
{% block content %}
@ -44,18 +45,22 @@
{% endif %}
{% if book.series %}
<meta itemprop="position" content="{{ book.series_number }}">
{% spaceless %}
<span itemprop="isPartOf" itemscope itemtype="https://schema.org/BookSeries">
{% if book.authors.exists %}
<a href="{% url 'book-series-by' book.authors.first.id %}?series_name={{ book.series | urlencode }}"
itemprop="url">
{% endif %}
<span itemprop="name">{{ book.series }}</span>
{% if book.series_number %} #{{ book.series_number }}{% endif %}
{% if book.authors.exists %}
</a>
{% endif %}
</span>
{% if book.series_number %}
<span>, #</span>
<span itemprop="position">{{ book.series_number }}</span>
{% endif %}
{% endspaceless %}
{% endif %}
</p>
{% endif %}

View file

@ -6,8 +6,8 @@
{% block content %}
<h1 class="title">{% trans "Confirm your email address" %}</h1>
<div class="columns">
<div class="column">
<div class="columns is-multiline">
<div class="column is-full is-half-desktop">
<div class="block content">
<section class="block">
<p>{% trans "A confirmation code has been sent to the email address you used to register your account." %}</p>

View file

@ -2,10 +2,10 @@
<div style="font-family: BlinkMacSystemFont,-apple-system,'Segoe UI',Roboto,Oxygen,Ubuntu,Cantarell,'Fira Sans','Droid Sans','Helvetica Neue',Helvetica,Arial,sans-serif; border-radius: 6px; background-color: #efefef; max-width: 632px;">
<div style="padding: 1rem; overflow: auto;">
<div style="float: left; margin-right: 1rem;">
<a style="color: #3273dc;" href="https://{{ domain }}" style="text-decoration: none;"><img src="{{ logo }}" alt="logo" loading="lazy" decoding="async"></a>
<a style="color: #3273dc;" href="{{ base_url }}" style="text-decoration: none;"><img src="{{ logo }}" alt="logo" loading="lazy" decoding="async"></a>
</div>
<div>
<a style="color: black; text-decoration: none" href="https://{{ domain }}" style="text-decoration: none;"><strong>{{ site_name }}</strong><br>
<a style="color: black; text-decoration: none" href="{{ base_url }}" style="text-decoration: none;"><strong>{{ site_name }}</strong><br>
{{ domain }}</a>
</div>
</div>
@ -18,9 +18,9 @@
</div>
<div style="padding: 1rem; font-size: 0.8rem;">
<p style="margin: 0; color: #333;">{% blocktrans %}BookWyrm hosted on <a style="color: #3273dc;" href="https://{{ domain }}">{{ site_name }}</a>{% endblocktrans %}</p>
<p style="margin: 0; color: #333;">{% blocktrans %}BookWyrm hosted on <a style="color: #3273dc;" href="{{ base_url }}">{{ site_name }}</a>{% endblocktrans %}</p>
{% if user %}
<p style="margin: 0; color: #333;"><a style="color: #3273dc;" href="https://{{ domain }}{% url 'prefs-profile' %}">{% trans "Email preference" %}</a></p>
<p style="margin: 0; color: #333;"><a style="color: #3273dc;" href="{{ base_url }}{% url 'prefs-profile' %}">{% trans "Email preference" %}</a></p>
{% endif %}
</div>
</div>
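The email templates now build links from a base_url variable instead of hard-coding https://{{ domain }}. A minimal sketch of how such a value could be supplied to the email context, assuming a BASE_URL setting and a helper of this shape (both are assumptions, not the project's actual code):

# sketch of a shared email context builder; the BASE_URL, DOMAIN and SITE_NAME
# attribute names are assumptions for illustration
from django.conf import settings

def email_context(user=None):
    """Variables shared by the HTML and plain-text email templates."""
    return {
        "site_name": getattr(settings, "SITE_NAME", "BookWyrm"),
        "domain": getattr(settings, "DOMAIN", "example.net"),
        "base_url": getattr(settings, "BASE_URL", "https://example.net"),
        "user": user,
    }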

View file

@ -12,6 +12,6 @@
<p>
{% url 'code-of-conduct' as coc_path %}
{% url 'about' as about_path %}
{% blocktrans %}Learn more <a href="https://{{ domain }}{{ about_path }}">about {{ site_name }}</a>.{% endblocktrans %}
{% blocktrans %}Learn more <a href="{{ base_url }}{{ about_path }}">about {{ site_name }}</a>.{% endblocktrans %}
</p>
{% endblock %}

View file

@ -5,6 +5,6 @@
{{ invite_link }}
{% blocktrans %}Learn more about {{ site_name }}:{% endblocktrans %} https://{{ domain }}{% url 'about' %}
{% blocktrans %}Learn more about {{ site_name }}:{% endblocktrans %} {{ base_url }}{% url 'about' %}
{% endblock %}

View file

@ -41,7 +41,7 @@
</section>
{% endif %}
{% if annual_summary_year and tab.key == 'home' %}
{% if annual_summary_year and tab.key == 'home' and has_summary_read_throughs %}
<section class="block is-hidden" data-hide="hide_annual_summary_{{ annual_summary_year }}">
{% include 'feed/summary_card.html' with year=annual_summary_year %}
<hr>
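The feed layout now also checks has_summary_read_throughs, so the annual summary card is only offered when the user actually finished something that year. A hedged sketch of how a view might compute that flag (ReadThrough and finish_date are assumed names; the real implementation may differ):

# sketch: flag for the annual summary banner; model and field names are assumptions
from bookwyrm import models

def get_has_summary_read_throughs(user, year):
    """True if the user finished at least one book in the given year."""
    return models.ReadThrough.objects.filter(
        user=user,
        finish_date__year=year,
    ).exists()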

Some files were not shown because too many files have changed in this diff.