models.py 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449
  1. import calendar
  2. import datetime
  3. import json
  4. import logging
  5. from collections import defaultdict
  6. from typing import Optional
  7. from uuid import uuid4
  8. from zoneinfo import ZoneInfo
  9. import pendulum
  10. import pytz
  11. from beers.models import Beer
  12. from boardgames.models import BoardGame
  13. from books.koreader import process_koreader_sqlite_file
  14. from books.models import Book, Paper, BookPageLogData, BookLogData
  15. from bricksets.models import BrickSet
  16. from dataclass_wizard.errors import ParseError
  17. from django.conf import settings
  18. from django.contrib.auth import get_user_model
  19. from django.utils.functional import cached_property
  20. from django.core.files import File
  21. from django.db import models
  22. from django.urls import reverse
  23. from django.utils import timezone
  24. from django_extensions.db.models import TimeStampedModel
  25. from foods.models import Food, FoodLogData
  26. from imagekit.models import ImageSpecField
  27. from imagekit.processors import ResizeToFit
  28. from lifeevents.models import LifeEvent
  29. from locations.models import GeoLocation
  30. from moods.models import Mood
  31. from music.models import Artist, Track
  32. from podcasts.models import PodcastEpisode
  33. from profiles.utils import (
  34. end_of_day,
  35. end_of_month,
  36. end_of_week,
  37. fix_profile_historic_timezones,
  38. start_of_day,
  39. start_of_month,
  40. start_of_week,
  41. )
  42. from puzzles.models import Puzzle
  43. from scrobbles import dataclasses as logdata
  44. from scrobbles import constants
  45. from scrobbles.constants import LONG_PLAY_MEDIA, MEDIA_END_PADDING_SECONDS
  46. from scrobbles.importers.lastfm import LastFM
  47. from scrobbles.notifications import ScrobbleNtfyNotification
  48. from scrobbles.mixins import ScrobblableItem
  49. from scrobbles.stats import build_charts
  50. from scrobbles.utils import get_file_md5_hash, media_class_to_foreign_key
  51. from sports.models import SportEvent
  52. from tasks.models import Task
  53. from trails.models import Trail
  54. from videogames import retroarch
  55. from videogames.models import VideoGame
  56. from videos.models import Series, Video
  57. from webpages.models import WebPage
# Module-level logger for all scrobble models.
logger = logging.getLogger(__name__)

# The active user model (may be swapped via AUTH_USER_MODEL).
User = get_user_model()

# Shorthand kwargs for the common "optional" model field.
BNULL = {"blank": True, "null": True}

# How many GPS points to keep for movement history; settings may override, default 3.
POINTS_FOR_MOVEMENT_HISTORY = int(
    getattr(settings, "POINTS_FOR_MOVEMENT_HISTORY", 3)
)
  64. class BaseFileImportMixin(TimeStampedModel):
  65. user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
  66. uuid = models.UUIDField(editable=False, default=uuid4)
  67. processing_started = models.DateTimeField(**BNULL)
  68. processed_finished = models.DateTimeField(**BNULL)
  69. process_log = models.TextField(**BNULL)
  70. process_count = models.IntegerField(**BNULL)
  71. class Meta:
  72. abstract = True
  73. def __str__(self):
  74. return f"{self.import_type} import on {self.human_start}"
  75. @property
  76. def human_start(self):
  77. start = "Unknown"
  78. if self.processing_started:
  79. start = self.processing_started.strftime("%B %d, %Y at %H:%M")
  80. return start
  81. @property
  82. def import_type(self) -> str:
  83. return "Unknown Import Source"
  84. def process(self, force=False):
  85. logger.warning("Process not implemented")
  86. def undo(self, dryrun=False):
  87. """Accepts the log from a scrobble import and removes the scrobbles"""
  88. from scrobbles.models import Scrobble
  89. if not self.process_log:
  90. logger.warning("No lines in process log found to undo")
  91. return
  92. for line in self.process_log.split("\n"):
  93. scrobble_id = line.split("\t")[0]
  94. scrobble = Scrobble.objects.filter(id=scrobble_id).first()
  95. if not scrobble:
  96. logger.warning(
  97. f"Could not find scrobble {scrobble_id} to undo"
  98. )
  99. continue
  100. logger.info(f"Removing scrobble {scrobble_id}")
  101. if not dryrun:
  102. scrobble.delete()
  103. self.processed_finished = None
  104. self.processing_started = None
  105. self.process_count = None
  106. self.process_log = ""
  107. self.save(
  108. update_fields=[
  109. "processed_finished",
  110. "processing_started",
  111. "process_log",
  112. "process_count",
  113. ]
  114. )
  115. def scrobbles(self) -> models.QuerySet:
  116. scrobble_ids = []
  117. if self.process_log:
  118. for line in self.process_log.split("\n"):
  119. sid = line.split("\t")[0]
  120. if sid:
  121. scrobble_ids.append(sid)
  122. return Scrobble.objects.filter(id__in=scrobble_ids)
  123. def mark_started(self):
  124. self.processing_started = timezone.now()
  125. self.save(update_fields=["processing_started"])
  126. def mark_finished(self):
  127. self.processed_finished = timezone.now()
  128. self.save(update_fields=["processed_finished"])
  129. def record_log(self, scrobbles):
  130. self.process_log = ""
  131. if not scrobbles:
  132. self.process_count = 0
  133. self.save(update_fields=["process_log", "process_count"])
  134. return
  135. for count, scrobble in enumerate(scrobbles):
  136. scrobble_str = (
  137. f"{scrobble.id}\t{scrobble.timestamp}\t{scrobble.media_obj}"
  138. )
  139. log_line = f"{scrobble_str}"
  140. if count > 0:
  141. log_line = "\n" + log_line
  142. self.process_log += log_line
  143. self.process_count = len(scrobbles)
  144. self.save(update_fields=["process_log", "process_count"])
  145. @property
  146. def upload_file_path(self):
  147. raise NotImplementedError
  148. class KoReaderImport(BaseFileImportMixin):
  149. class Meta:
  150. verbose_name = "KOReader Import"
  151. @property
  152. def import_type(self) -> str:
  153. return "KOReader"
  154. def get_absolute_url(self):
  155. return reverse(
  156. "scrobbles:koreader-import-detail", kwargs={"slug": self.uuid}
  157. )
  158. def get_path(instance, filename):
  159. extension = filename.split(".")[-1]
  160. uuid = instance.uuid
  161. return f"koreader-uploads/{uuid}.{extension}"
  162. @property
  163. def upload_file_path(self) -> str:
  164. if getattr(settings, "USE_S3_STORAGE"):
  165. path = self.sqlite_file.url
  166. else:
  167. path = self.sqlite_file.path
  168. return path
  169. sqlite_file = models.FileField(upload_to=get_path, **BNULL)
  170. def save_sqlite_file_to_self(self, file_path):
  171. with open(file_path, "rb") as f:
  172. self.sqlite_file.save(
  173. f"{self.user_id}-koreader-statistics.sqlite",
  174. File(f),
  175. save=True,
  176. )
  177. def file_md5_hash(self) -> str:
  178. if self.sqlite_file:
  179. return get_file_md5_hash(self.sqlite_file.path)
  180. return ""
  181. def process(self, force=False):
  182. if self.user.id == 1:
  183. fix_profile_historic_timezones(self.user.profile)
  184. if self.processed_finished and not force:
  185. logger.info(
  186. f"{self} already processed on {self.processed_finished}"
  187. )
  188. return
  189. self.mark_started()
  190. scrobbles = process_koreader_sqlite_file(
  191. self.upload_file_path, self.user.id
  192. )
  193. self.record_log(scrobbles)
  194. self.mark_finished()
  195. class AudioScrobblerTSVImport(BaseFileImportMixin):
  196. class Meta:
  197. verbose_name = "AudioScrobbler TSV Import"
  198. @property
  199. def import_type(self) -> str:
  200. return "AudiosScrobbler"
  201. def get_absolute_url(self):
  202. return reverse(
  203. "scrobbles:tsv-import-detail", kwargs={"slug": self.uuid}
  204. )
  205. def get_path(instance, filename):
  206. extension = filename.split(".")[-1]
  207. uuid = instance.uuid
  208. return f"audioscrobbler-uploads/{uuid}.{extension}"
  209. @property
  210. def upload_file_path(self):
  211. if getattr(settings, "USE_S3_STORAGE"):
  212. path = self.tsv_file.url
  213. else:
  214. path = self.tsv_file.path
  215. return path
  216. tsv_file = models.FileField(upload_to=get_path, **BNULL)
  217. def process(self, force=False):
  218. from scrobbles.importers.tsv import import_audioscrobbler_tsv_file
  219. if self.user.id == 1:
  220. fix_profile_historic_timezones(self.user.profile)
  221. if self.processed_finished and not force:
  222. logger.info(
  223. f"{self} already processed on {self.processed_finished}"
  224. )
  225. return
  226. self.mark_started()
  227. scrobbles = import_audioscrobbler_tsv_file(
  228. self.upload_file_path, self.user.id
  229. )
  230. self.record_log(scrobbles)
  231. self.mark_finished()
  232. class LastFmImport(BaseFileImportMixin):
  233. class Meta:
  234. verbose_name = "Last.FM Import"
  235. @property
  236. def import_type(self) -> str:
  237. return "LastFM"
  238. def get_absolute_url(self):
  239. return reverse(
  240. "scrobbles:lastfm-import-detail", kwargs={"slug": self.uuid}
  241. )
  242. def process(self, import_all=False):
  243. """Import scrobbles found on LastFM"""
  244. if self.user.id == 1:
  245. fix_profile_historic_timezones(self.user.profile)
  246. if self.processed_finished:
  247. logger.info(
  248. f"{self} already processed on {self.processed_finished}"
  249. )
  250. return
  251. last_import = None
  252. if not import_all:
  253. try:
  254. last_import = LastFmImport.objects.exclude(id=self.id).last()
  255. except:
  256. pass
  257. if not import_all and not last_import:
  258. logger.warn(
  259. "No previous import, to import all Last.fm scrobbles, pass import_all=True"
  260. )
  261. return
  262. lastfm = LastFM(self.user)
  263. last_processed = None
  264. if last_import:
  265. last_processed = last_import.processed_finished
  266. self.mark_started()
  267. scrobbles = lastfm.import_from_lastfm(last_processed)
  268. self.record_log(scrobbles)
  269. self.mark_finished()
  270. class RetroarchImport(BaseFileImportMixin):
  271. class Meta:
  272. verbose_name = "Retroarch Import"
  273. @property
  274. def import_type(self) -> str:
  275. return "Retroarch"
  276. def get_absolute_url(self):
  277. return reverse(
  278. "scrobbles:retroarch-import-detail", kwargs={"slug": self.uuid}
  279. )
  280. def process(self, import_all=False, force=False):
  281. """Import scrobbles found on Retroarch"""
  282. if self.user.id == 1:
  283. fix_profile_historic_timezones(self.user.profile)
  284. if self.processed_finished and not force:
  285. logger.info(
  286. f"{self} already processed on {self.processed_finished}"
  287. )
  288. return
  289. if force:
  290. logger.info(f"You told me to force import from Retroarch")
  291. if not self.user.profile.retroarch_path:
  292. logger.info(
  293. "Tying to import Retroarch logs, but user has no retroarch_path configured"
  294. )
  295. self.mark_started()
  296. scrobbles = retroarch.import_retroarch_lrtl_files(
  297. self.user.profile.retroarch_path,
  298. self.user.id,
  299. )
  300. self.record_log(scrobbles)
  301. self.mark_finished()
  302. class ChartRecord(TimeStampedModel):
  303. """Sort of like a materialized view for what we could dynamically generate,
  304. but would kill the DB as it gets larger. Collects time-based records
  305. generated by a cron-like archival job
  306. 1972 by Josh Rouse - #3 in 2023, January
  307. """
  308. user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
  309. rank = models.IntegerField(db_index=True)
  310. count = models.IntegerField(default=0)
  311. year = models.IntegerField(**BNULL)
  312. month = models.IntegerField(**BNULL)
  313. week = models.IntegerField(**BNULL)
  314. day = models.IntegerField(**BNULL)
  315. video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
  316. series = models.ForeignKey(Series, on_delete=models.DO_NOTHING, **BNULL)
  317. artist = models.ForeignKey(Artist, on_delete=models.DO_NOTHING, **BNULL)
  318. track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
  319. period_start = models.DateTimeField(**BNULL)
  320. period_end = models.DateTimeField(**BNULL)
  321. def save(self, *args, **kwargs):
  322. profile = self.user.profile
  323. if self.week:
  324. # set start and end to start and end of week
  325. period = datetime.date.fromisocalendar(self.year, self.week, 1)
  326. self.period_start = start_of_week(period, profile)
  327. self.period_start = end_of_week(period, profile)
  328. if self.day:
  329. period = datetime.datetime(self.year, self.month, self.day)
  330. self.period_start = start_of_day(period, profile)
  331. self.period_end = end_of_day(period, profile)
  332. if self.month and not self.day:
  333. period = datetime.datetime(self.year, self.month, 1)
  334. self.period_start = start_of_month(period, profile)
  335. self.period_end = end_of_month(period, profile)
  336. super(ChartRecord, self).save(*args, **kwargs)
  337. @property
  338. def media_obj(self):
  339. return self.item.get_concrete_model()
  340. @property
  341. def month_str(self) -> str:
  342. month_str = ""
  343. if self.month:
  344. month_str = calendar.month_name[self.month]
  345. return month_str
  346. @property
  347. def day_str(self) -> str:
  348. day_str = ""
  349. if self.day:
  350. day_str = str(self.day)
  351. return day_str
  352. @property
  353. def week_str(self) -> str:
  354. week_str = ""
  355. if self.week:
  356. week_str = str(self.week)
  357. return "Week " + week_str
  358. @property
  359. def period(self) -> str:
  360. period = str(self.year)
  361. if self.month:
  362. period = " ".join([self.month_str, period])
  363. if self.week:
  364. period = " ".join([self.week_str, period])
  365. if self.day:
  366. period = " ".join([self.day_str, period])
  367. return period
  368. @property
  369. def period_type(self) -> str:
  370. period = "year"
  371. if self.month:
  372. period = "month"
  373. if self.week:
  374. period = "week"
  375. if self.day:
  376. period = "day"
  377. return period
  378. def __str__(self):
  379. title = f"#{self.rank} in {self.period}"
  380. if self.day or self.week:
  381. title = f"#{self.rank} on {self.period}"
  382. return title
  383. def link(self):
  384. get_params = f"?date={self.year}"
  385. if self.week:
  386. get_params = get_params = get_params + f"-W{self.week}"
  387. if self.month:
  388. get_params = get_params = get_params + f"-{self.month}"
  389. if self.day:
  390. get_params = get_params = get_params + f"-{self.day}"
  391. if self.artist:
  392. get_params = get_params + "&media=Artist"
  393. return reverse("scrobbles:charts-home") + get_params
  394. @classmethod
  395. def build(cls, user, **kwargs):
  396. build_charts(user=user, **kwargs)
  397. @classmethod
  398. def for_year(cls, user, year):
  399. return cls.objects.filter(year=year, user=user)
  400. @classmethod
  401. def for_month(cls, user, year, month):
  402. return cls.objects.filter(year=year, month=month, user=user)
  403. @classmethod
  404. def for_day(cls, user, year, day, month):
  405. return cls.objects.filter(year=year, month=month, day=day, user=user)
  406. @classmethod
  407. def for_week(cls, user, year, week):
  408. return cls.objects.filter(year=year, week=week, user=user)
class Scrobble(TimeStampedModel):
    """A scrobble tracks played media items by a user."""

    uuid = models.UUIDField(editable=False, **BNULL)
    # Generic pointer to the scrobblable item; the concrete media instance
    # is resolved via the media_obj property.
    item = models.ForeignKey(
        ScrobblableItem,
        null=True,
        on_delete=models.CASCADE,
        related_name="scrobble_set",
    )
    # Denormalized from item in save(); not editable by users.
    media_type = models.CharField(
        max_length=14,
        choices=constants.MediaType.choices,
        db_index=True,
        editable=False,
    )
    user = models.ForeignKey(
        User, blank=True, null=True, on_delete=models.DO_NOTHING
    )

    # Time keeping
    timestamp = models.DateTimeField(**BNULL)
    stop_timestamp = models.DateTimeField(**BNULL)
    playback_position_seconds = models.IntegerField(**BNULL)

    # Status indicators
    is_paused = models.BooleanField(default=False)
    played_to_completion = models.BooleanField(default=False)
    in_progress = models.BooleanField(default=True)

    # Metadata
    source = models.CharField(max_length=255, **BNULL)
    # Free-form per-scrobble data, round-tripped through the media's
    # logdata dataclass via custom JSON encoder/decoder.
    log = models.JSONField(
        **BNULL,
        default=dict,
        encoder=logdata.ScrobbleLogDataEncoder,
        decoder=logdata.ScrobbleLogDataDecoder,
    )
    # Timezone name (e.g. "America/New_York") captured at save time.
    timezone = models.CharField(max_length=50, **BNULL)

    # Fields for keeping track of video game data
    videogame_save_data = models.FileField(
        upload_to="scrobbles/videogame_save_data/", **BNULL
    )
    gpx_file = models.FileField(upload_to="scrobbles/gpx_file/", **BNULL)
    screenshot = models.ImageField(
        upload_to="scrobbles/videogame_screenshot/", **BNULL
    )
    # Derived thumbnails generated by imagekit from ``screenshot``.
    screenshot_small = ImageSpecField(
        source="screenshot",
        processors=[ResizeToFit(100, 100)],
        format="JPEG",
        options={"quality": 60},
    )
    screenshot_medium = ImageSpecField(
        source="screenshot",
        processors=[ResizeToFit(300, 300)],
        format="JPEG",
        options={"quality": 75},
    )
    @cached_property
    def media_obj(self):
        """
        Return the concrete media instance (Book, Video, Track, etc.).
        Cached for the lifetime of this model instance.
        """
        if not self.item:
            return None
        return self.item.get_concrete_instance()

    class Meta:
        # Newest scrobbles first by default.
        ordering = ["-timestamp"]
        indexes = [
            models.Index(fields=["timestamp"]),
            models.Index(fields=["media_type"]),
        ]
  479. @classmethod
  480. def for_year(cls, user, year):
  481. return cls.objects.filter(timestamp__year=year, user=user).order_by(
  482. "-timestamp"
  483. )
  484. @classmethod
  485. def for_month(cls, user, year, month):
  486. return cls.objects.filter(
  487. timestamp__year=year, timestamp__month=month, user=user
  488. ).order_by("-timestamp")
  489. @classmethod
  490. def for_day(cls, user, year, month, day):
  491. return cls.objects.filter(
  492. timestamp__year=year,
  493. timestamp__month=month,
  494. timestamp__day=day,
  495. user=user,
  496. ).order_by("-timestamp")
  497. @classmethod
  498. def for_week(cls, user, year, week):
  499. return cls.objects.filter(
  500. timestamp__year=year, timestamp__week=week, user=user
  501. ).order_by("-timestamp")
  502. @classmethod
  503. def as_dict_by_type(cls, scrobble_qs: models.QuerySet) -> dict:
  504. scrobbles_by_type = defaultdict(list)
  505. for scrobble in scrobble_qs:
  506. scrobbles_by_type[scrobble.media_type].append(scrobble)
  507. if not scrobbles_by_type.get(scrobble.media_type + "_count"):
  508. scrobbles_by_type[scrobble.media_type + "_count"] = 0
  509. scrobbles_by_type[scrobble.media_type + "_count"] += 1
  510. if not scrobbles_by_type.get(scrobble.media_type + "_time"):
  511. scrobbles_by_type[scrobble.media_type + "_time"] = 0
  512. scrobbles_by_type[scrobble.media_type + "_time"] += int(
  513. (scrobble.elapsed_time)
  514. )
  515. # Remove any locations without titles
  516. if "GeoLocation" in scrobbles_by_type.keys():
  517. for loc_scrobble in scrobbles_by_type["GeoLocation"]:
  518. if not loc_scrobble.media_obj.title:
  519. scrobbles_by_type["GeoLocation"].remove(loc_scrobble)
  520. scrobbles_by_type["GeoLocation_count"] -= 1
  521. return scrobbles_by_type
  522. @classmethod
  523. def in_progress_for_user(cls, user_id: int) -> models.QuerySet:
  524. return cls.objects.filter(
  525. user=user_id,
  526. in_progress=True,
  527. played_to_completion=False,
  528. is_paused=False,
  529. )
  530. @property
  531. def last_serial_scrobble(self) -> "Scrobble | None":
  532. from scrobbles.models import Scrobble
  533. try:
  534. if self.logdata and self.logdata.serial_scrobble_id:
  535. return Scrobble.objects.filter(
  536. id=self.logdata.serial_scrobble_id
  537. ).first()
  538. except AttributeError:
  539. return
  540. @property
  541. def finish_url(self) -> str:
  542. return reverse("scrobbles:finish", kwargs={"uuid": self.uuid})
  543. def save(self, *args, **kwargs):
  544. if not self.uuid:
  545. self.uuid = uuid4()
  546. if self.item:
  547. self.media_type = self.item.get_media_type()
  548. if not self.timezone:
  549. timezone = settings.TIME_ZONE
  550. if self.user and self.user.profile:
  551. timezone = self.user.profile.timezone
  552. self.timezone = timezone
  553. # Microseconds mess up Django's filtering, and we don't need be that specific
  554. if self.timestamp:
  555. self.timestamp = self.timestamp.replace(microsecond=0)
  556. return super().save(*args, **kwargs)
    def get_absolute_url(self):
        """Detail-page URL; lazily assigns and persists a uuid if missing."""
        if not self.uuid:
            # Older rows may predate the uuid field; backfill on first access.
            self.uuid = uuid4()
            self.save()
        return reverse("scrobbles:detail", kwargs={"uuid": self.uuid})
  562. def push_to_archivebox(self):
  563. pushable_media = hasattr(
  564. self.media_obj, "push_to_archivebox"
  565. ) and callable(self.media_obj.push_to_archivebox)
  566. if pushable_media and self.user.profile.archivebox_url:
  567. try:
  568. self.media_obj.push_to_archivebox(
  569. url=self.user.profile.archivebox_url,
  570. username=self.user.profile.archivebox_username,
  571. password=self.user.profile.archivebox_password,
  572. )
  573. except Exception:
  574. logger.info(
  575. "Failed to push URL to archivebox",
  576. extra={
  577. "archivebox_url": self.user.profile.archivebox_url,
  578. "archivebox_username": self.user.profile.archivebox_username,
  579. },
  580. )
  581. @property
  582. def logdata(self) -> Optional[logdata.BaseLogData]:
  583. if self.media_obj:
  584. logdata_cls = self.media_obj.logdata_cls
  585. else:
  586. logdata_cls = logdata.BaseLogData
  587. log_dict = self.log
  588. if isinstance(self.log, str):
  589. # There's nothing stopping django from saving a string in a JSONField :(
  590. logger.warning(
  591. "[scrobbles] Received string in JSON data in log",
  592. extra={"log": self.log},
  593. )
  594. log_dict = json.loads(self.log)
  595. if not log_dict:
  596. log_dict = {}
  597. try:
  598. return logdata_cls(**log_dict)
  599. except ParseError as e:
  600. logger.warning(
  601. "Could not parse log data",
  602. extra={
  603. "log_dict": log_dict,
  604. "scrobble_id": self.id,
  605. "error": e,
  606. },
  607. )
  608. return logdata_cls()
  609. except TypeError as e:
  610. return logdata_cls()
  611. def redirect_url(self, user_id) -> str:
  612. user = User.objects.filter(id=user_id).first()
  613. redirect_url = self.media_obj.get_absolute_url()
  614. if (
  615. self.media_type == constants.MediaType.WEBPAGE
  616. and user
  617. and user.profile.redirect_to_webpage
  618. ):
  619. logger.info(f"Redirecting to {self.media_obj.url}")
  620. redirect_url = self.media_obj.url
  621. if (
  622. self.media_type == constants.MediaType.VIDEO
  623. and self.media_obj.youtube_id
  624. ):
  625. redirect_url = self.media_obj.youtube_link
  626. return redirect_url
  627. @property
  628. def tzinfo(self):
  629. return ZoneInfo(self.timezone)
  630. @property
  631. def local_timestamp(self):
  632. return timezone.localtime(self.timestamp, timezone=self.tzinfo)
  633. @property
  634. def local_stop_timestamp(self):
  635. if self.stop_timestamp:
  636. return timezone.localtime(
  637. self.stop_timestamp, timezone=self.tzinfo
  638. )
  639. @property
  640. def scrobble_media_key(self) -> str:
  641. return media_class_to_foreign_key(self.media_type) + "_id"
  642. @property
  643. def status(self) -> str:
  644. if self.is_paused:
  645. return "paused"
  646. if self.played_to_completion:
  647. return "finished"
  648. if self.in_progress:
  649. return "in-progress"
  650. return "zombie"
  651. @property
  652. def is_stale(self) -> bool:
  653. """Mark scrobble as stale if it's been more than an hour since it was updated
  654. Effectively, this allows 'resuming' a video scrobble within an hour of starting it.
  655. """
  656. is_stale = False
  657. now = timezone.now()
  658. seconds_since_last_update = (now - self.modified).total_seconds()
  659. if seconds_since_last_update >= self.media_obj.SECONDS_TO_STALE:
  660. is_stale = True
  661. return is_stale
  662. @property
  663. def previous(self) -> "Scrobble":
  664. return (
  665. self.media_obj.scrobble_set.order_by("-timestamp")
  666. .filter(timestamp__lt=self.timestamp)
  667. .first()
  668. )
  669. @property
  670. def next(self) -> "Scrobble":
  671. return (
  672. self.media_obj.scrobble_set.order_by("timestamp")
  673. .filter(timestamp__gt=self.timestamp)
  674. .first()
  675. )
  676. @property
  677. def previous_by_media(self) -> "Scrobble":
  678. return (
  679. Scrobble.objects.filter(
  680. media_type=self.media_type,
  681. user=self.user,
  682. timestamp__lt=self.timestamp,
  683. )
  684. .order_by("-timestamp")
  685. .first()
  686. )
  687. @property
  688. def next_by_media(self) -> "Scrobble":
  689. return (
  690. Scrobble.objects.filter(
  691. media_type=self.media_type,
  692. user=self.user,
  693. timestamp__gt=self.timestamp,
  694. )
  695. .order_by("-timestamp")
  696. .first()
  697. )
    @property
    def previous_by_user(self) -> "Scrobble":
        """Most recent scrobble before this one.

        NOTE(review): despite the name, this does not filter on
        ``self.user`` — it scans all users' scrobbles. Confirm whether a
        ``user=self.user`` filter is intended (previous_by_media has one).
        """
        return (
            Scrobble.objects.order_by("-timestamp")
            .filter(timestamp__lt=self.timestamp)
            .first()
        )
  705. @property
  706. def next_by_user(self) -> "Scrobble":
  707. return (
  708. Scrobble.objects.order_by("-timestamp")
  709. .filter(timestamp__gt=self.timestamp)
  710. .first()
  711. )
  712. @property
  713. def session_pages_read(self) -> Optional[int]:
  714. pages_read = 0
  715. if self.log and isinstance(self.log, dict):
  716. pages_read = self.log.get("pages_read", 0)
  717. return pages_read
  718. @property
  719. def long_play_complete(self) -> bool:
  720. if not self.item.is_long_play_media:
  721. raise Exception("Is not long play media")
  722. if not self.log:
  723. return True
  724. return self.log.get("long_play_complete", False)
  725. @property
  726. def elapsed_time(self) -> int | None:
  727. if self.played_to_completion:
  728. if self.playback_position_seconds:
  729. return self.playback_position_seconds
  730. if self.media_obj and self.media_obj.run_time_seconds:
  731. return self.media_obj.run_time_seconds
  732. return (timezone.now() - self.timestamp).seconds
    @property
    def percent_played(self) -> int:
        """Integer percent of the media's run time that has been played.

        Returns 100 when the media has no run time, or when the scrobble
        finished without a recorded position. Long-play media also credits
        seconds carried over from the previous unfinished session.
        """
        if not self.media_obj:
            return 0
        if self.media_obj and not self.media_obj.run_time_seconds:
            # Nothing to compare against; treat as fully played.
            return 100
        if not self.playback_position_seconds and self.played_to_completion:
            return 100

        playback_seconds = self.playback_position_seconds
        if not playback_seconds:
            # Fall back to wall-clock elapsed time.
            playback_seconds = self.elapsed_time

        run_time_secs = self.media_obj.run_time_seconds
        percent = int((playback_seconds / run_time_secs) * 100)

        if self.item.is_long_play_media:
            long_play_secs = 0
            if self.previous and not self.previous.long_play_complete:
                # NOTE(review): ``long_play_seconds`` is not defined in the
                # visible portion of this model — confirm it exists elsewhere.
                long_play_secs = self.previous.long_play_seconds or 0
            percent = int(
                ((playback_seconds + long_play_secs) / run_time_secs) * 100
            )
        return percent
    @property
    def probably_still_in_progress(self) -> bool:
        """Add our start time to our media run time to get when we expect to
        finish, then allow per-media-type padding beyond that.

        Audio tracks should be given a second or two of grace, videos should
        be given closer to 30 minutes, because the odds of watching it back to
        back are very slim.
        """
        is_in_progress = False
        # Only media types with a configured end padding participate.
        padding_seconds = MEDIA_END_PADDING_SECONDS.get(self.media_type)
        if not padding_seconds:
            return is_in_progress

        if not self.media_obj:
            logger.info(
                "[scrobbling] scrobble has no media obj",
                extra={
                    "media_id": self.media_obj,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "probably_still_in_progress": is_in_progress,
                },
            )
            return is_in_progress

        if not self.media_obj.run_time_seconds:
            logger.info(
                "[scrobbling] media has no run time seconds, cannot calculate end",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "probably_still_in_progress": is_in_progress,
                },
            )
            return is_in_progress

        expected_end = self.timestamp + datetime.timedelta(
            seconds=self.media_obj.run_time_seconds
        )
        expected_end_padded = expected_end + datetime.timedelta(
            seconds=padding_seconds
        )
        # Take our start time, add our media length and an extra 30 min (1800s) is it still in the future? keep going
        is_in_progress = expected_end_padded > pendulum.now()

        logger.info(
            "[scrobbling] checking if we're probably still playing",
            extra={
                "media_id": self.media_obj.id,
                "scrobble_id": self.id,
                "media_type": self.media_type,
                "probably_still_in_progress": is_in_progress,
            },
        )
        return is_in_progress
  805. @property
  806. def can_be_updated(self) -> bool:
  807. if self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values() and self.source != "readcomicsonline.ru":
  808. logger.info(
  809. "[scrobbling] cannot be updated, long play media",
  810. extra={
  811. "media_id": self.media_obj.id,
  812. "scrobble_id": self.id,
  813. "media_type": self.media_type,
  814. },
  815. )
  816. return False
  817. if self.percent_played >= 100 and not self.probably_still_in_progress:
  818. logger.info(
  819. "[scrobbling] cannot be updated, existing scrobble is 100% played",
  820. extra={
  821. "media_id": self.media_obj.id,
  822. "scrobble_id": self.id,
  823. "media_type": self.media_type,
  824. },
  825. )
  826. return False
  827. if self.is_stale:
  828. logger.info(
  829. "[scrobbling] cannot be udpated, stale",
  830. extra={
  831. "media_id": self.media_obj.id,
  832. "scrobble_id": self.id,
  833. "media_type": self.media_type,
  834. },
  835. )
  836. return False
  837. logger.info(
  838. "[scrobbling] can be updated",
  839. extra={
  840. "media_id": self.media_obj.id,
  841. "scrobble_id": self.id,
  842. "media_type": self.media_type,
  843. },
  844. )
  845. return True
  846. @classmethod
  847. def by_date(cls, media_type: str = "Track"):
  848. cls.objects.filter(media_type=media_type).values(
  849. "timestamp__date"
  850. ).annotate(count=models.Count("id")).values(
  851. "timestamp__date", "count"
  852. ).order_by(
  853. "-count",
  854. )
def __str__(self):
    # Human-readable label: media object plus the scrobble's start timestamp
    return f"Scrobble of {self.media_obj} ({self.timestamp})"
  857. def calc_reading_duration(self) -> int:
  858. duration = 0
  859. if self.logdata.page_data:
  860. for k, v in self.logdata.page_data.items():
  861. duration += v.get("duration")
  862. return duration
  863. def calc_pages_read(self) -> int:
  864. pages_read = 0
  865. if self.logdata.page_data:
  866. pages = [int(k) for k in self.logdata.page_data.keys()]
  867. pages.sort()
  868. if len(pages) == 1:
  869. pages_read = 1
  870. elif len(pages) >= 2:
  871. pages_read += pages[-1] - pages[0]
  872. else:
  873. pages_read = pages[-1] - pages[0]
  874. return pages_read
  875. @property
  876. def last_page_read(self) -> int:
  877. last_page = 0
  878. if self.logdata.page_data:
  879. pages = [int(k) for k in self.logdata.page_data.keys()]
  880. pages.sort()
  881. last_page = pages[-1]
  882. return last_page
  883. @property
  884. def get_media_source_url(self) -> str:
  885. url = ""
  886. if self.media_type == "Website":
  887. url = self.media_obj.url
  888. if self.media_type == "Task":
  889. url = self.media_obj.source_url_for_user(self.user)
  890. return url
  891. @classmethod
  892. def create_or_update(
  893. cls, item, user_id: int, scrobble_data: dict, **kwargs
  894. ) -> "Scrobble":
  895. skip_in_progress_check = kwargs.get("skip_in_progress_check", False)
  896. read_log_page = kwargs.get("read_log_page", None)
  897. mtype = item.get_media_type()
  898. # Find our last scrobble of this media item (track, video, etc)
  899. scrobble = cls.objects.filter(item=item, user_id=user_id).order_by("-timestamp").first()
  900. source = scrobble_data.get("source", "Vrobbler")
  901. # GeoLocations are a special case scrobble
  902. if mtype == constants.MediaType.GEO_LOCATION:
  903. return cls.create_or_update_location(item, scrobble_data, user_id)
  904. if not skip_in_progress_check or read_log_page:
  905. logger.info(
  906. f"[create_or_update] check for existing scrobble to update ",
  907. extra={
  908. "scrobble_id": scrobble.id if scrobble else None,
  909. "media_type": mtype,
  910. "item_id": item.id,
  911. "scrobble_data": scrobble_data,
  912. },
  913. )
  914. scrobble_data["playback_status"] = scrobble_data.pop(
  915. "status", None
  916. )
  917. # If it's marked as stopped, send it through our update mechanism, which will complete it
  918. if scrobble and (
  919. scrobble.can_be_updated
  920. or (read_log_page and scrobble.can_be_updated)
  921. or scrobble_data["playback_status"] == "stopped"
  922. ):
  923. if read_log_page:
  924. page_list = scrobble.log.get("page_data", [])
  925. if page_list:
  926. for page in page_list:
  927. if not page.get("end_ts", None):
  928. page["end_ts"] = int(timezone.now().timestamp())
  929. page["duration"] = page["end_ts"] - page.get("start_ts")
  930. page_list.append(
  931. BookPageLogData(
  932. page_number=read_log_page,
  933. start_ts=int(timezone.now().timestamp())
  934. )
  935. )
  936. scrobble.log["page_data"] = page_list
  937. scrobble.save(update_fields=["log"])
  938. elif "log" in scrobble_data.keys() and scrobble.log:
  939. scrobble_data["log"] = scrobble.log | scrobble_data["log"]
  940. return scrobble.update(scrobble_data)
  941. # Discard status before creating
  942. scrobble_data.pop("playback_status")
  943. if read_log_page:
  944. scrobble_data["log"] = BookLogData(page_data=[BookPageLogData(page_number=read_log_page, start_ts=int(timezone.now().timestamp()))])
  945. logger.info(
  946. f"[scrobbling] creating new scrobble",
  947. extra={
  948. "scrobble_id": scrobble.id if scrobble else None,
  949. "media_type": mtype,
  950. "item_id": item.id,
  951. "source": source,
  952. },
  953. )
  954. if mtype == constants.MediaType.FOOD and not scrobble_data.get("log", {}).get("calories", None):
  955. if item.calories:
  956. scrobble_data["log"] = FoodLogData(calories=item.calories)
  957. scrobble_data["item_id"] = item.id
  958. scrobble = cls.create(scrobble_data)
  959. return scrobble
@classmethod
def create_or_update_location(
    cls, location: GeoLocation, scrobble_data: dict, user_id: int
) -> "Scrobble":
    """Location is a special type, where the current scrobble for a user is
    always the current active scrobble, and we only finish it and move on
    if we get a new location that is far enough (and far enough over the
    last three past scrobbles) to have actually moved.
    """
    # Point the FK field for this media class at the new location
    key = media_class_to_foreign_key(location.__class__.__name__)
    scrobble_data[key + "_id"] = location.id
    # Latest location scrobble at or before the incoming timestamp
    scrobble = (
        cls.objects.filter(
            media_type=constants.MediaType.GEO_LOCATION,
            user_id=user_id,
            timestamp__lte=scrobble_data.get("timestamp"),
        )
        .order_by("-timestamp")
        .first()
    )
    logger.info(
        f"[scrobbling] fetching last location scrobble",
        extra={
            "scrobble_id": scrobble.id if scrobble else None,
            "media_type": constants.MediaType.GEO_LOCATION,
            "media_id": location.id,
            "scrobble_data": scrobble_data,
        },
    )
    # First location scrobble ever for this user: just create it
    if not scrobble:
        logger.info(
            f"[scrobbling] finished - no existing location scrobbles found",
            extra={
                "media_id": location.id,
                "media_type": constants.MediaType.GEO_LOCATION,
            },
        )
        return cls.create(scrobble_data)
    # Same location object: the active scrobble continues unchanged
    if scrobble.media_obj == location:
        logger.info(
            f"[scrobbling] finished - same location - not moved",
            extra={
                "media_type": constants.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "scrobble_id": scrobble.id,
                "scrobble_media_id": scrobble.media_obj.id,
            },
        )
        return scrobble
    has_moved = location.has_moved(scrobble.media_obj)
    logger.info(
        f"[scrobbling] checking - has last location has moved?",
        extra={
            "scrobble_id": scrobble.id,
            "scrobble_media_id": scrobble.media_obj.id,
            "media_type": constants.MediaType.GEO_LOCATION,
            "media_id": location.id,
            "has_moved": has_moved,
        },
    )
    # Not far enough from the previous location: keep the active scrobble
    if not has_moved:
        logger.info(
            f"[scrobbling] finished - not from old location - not moved",
            extra={
                "scrobble_id": scrobble.id,
                "media_id": location.id,
                "media_type": constants.MediaType.GEO_LOCATION,
                "old_media__id": scrobble.media_obj.id,
            },
        )
        return scrobble
    # Close to a known named location: annotate the log and keep the
    # active scrobble rather than starting a new one
    if existing_locations := location.in_proximity(named=True):
        existing_location = existing_locations.first()
        ts = int(pendulum.now().timestamp())
        # NOTE(review): assumes scrobble.log is already a dict here —
        # a None log would raise TypeError; confirm the field default
        scrobble.log[
            ts
        ] = f"Location {location.id} too close to this scrobble"
        scrobble.save(update_fields=["log"])
        logger.info(
            f"[scrobbling] finished - found existing named location",
            extra={
                "media_id": location.id,
                "media_type": constants.MediaType.GEO_LOCATION,
                "old_media_id": existing_location.id,
            },
        )
        return scrobble
    # Genuinely moved: finish the old location scrobble and start a new one
    scrobble.stop(force_finish=True)
    scrobble = cls.create(scrobble_data)
    logger.info(
        f"[scrobbling] finished - created for location",
        extra={
            "scrobble_id": scrobble.id,
            "media_id": location.id,
            "scrobble_data": scrobble_data,
            "media_type": constants.MediaType.GEO_LOCATION,
            "source": scrobble_data.get("source"),
        },
    )
    return scrobble
def update(self, scrobble_data: dict) -> "Scrobble":
    """Apply incoming scrobble data, honoring any playback status change.

    Pops ``playback_status`` out of the data first so it is never written
    as a model field, dispatches to stop()/pause()/resume() as needed,
    then writes the remaining keys as field updates.
    """
    # Status is a field we get from Mopidy, which refuses to poll us
    playback_status = scrobble_data.pop("playback_status", None)
    logger.info(
        "[update] called",
        extra={
            "scrobble_id": self.id,
            "scrobble_data": scrobble_data,
            "media_type": self.media_type,
            "playback_status": playback_status,
        },
    )
    # Past the completion threshold we force a stop regardless of the
    # status the client reported
    if self.beyond_completion_percent:
        playback_status = "stopped"
    if playback_status == "stopped":
        self.stop()
    if playback_status == "paused":
        self.pause()
    if playback_status == "resumed":
        self.resume()
    # Any non-resume update advances the stop timestamp, reusing the
    # incoming timestamp when one was provided
    if playback_status != "resumed":
        scrobble_data["stop_timestamp"] = (
            scrobble_data.pop("timestamp", None) or timezone.now()
        )
    # timestamp should be more-or-less immutable
    scrobble_data.pop("timestamp", None)
    update_fields = []
    for key, value in scrobble_data.items():
        setattr(self, key, value)
        update_fields.append(key)
    self.save(update_fields=update_fields)
    return self
  1092. @classmethod
  1093. def create(
  1094. cls,
  1095. scrobble_data: dict,
  1096. ) -> "Scrobble":
  1097. scrobble = cls.objects.create(**scrobble_data)
  1098. ScrobbleNtfyNotification(scrobble).send()
  1099. return scrobble
def stop(self, timestamp=None, force_finish=False) -> None:
    """Finish this scrobble: set the stop time, completion flags and stats.

    Args:
        timestamp: explicit stop time; defaults to now.
        force_finish: accepted from callers (e.g. location handling) —
            NOTE(review): not referenced anywhere in this body; confirm
            whether it is still needed.
    """
    self.stop_timestamp = timestamp or timezone.now()
    self.played_to_completion = True
    self.in_progress = False
    # Derive the playback position from elapsed wall-clock time when the
    # client never reported one
    if not self.playback_position_seconds:
        self.playback_position_seconds = int(
            (self.stop_timestamp - self.timestamp).total_seconds()
        )
    self.save(
        update_fields=[
            "in_progress",
            "played_to_completion",
            "stop_timestamp",
            "playback_position_seconds",
        ]
    )
    class_name = self.media_obj.__class__.__name__
    # Long-play media rolls up cumulative seconds; books also get
    # reading stats recalculated (each performs its own save)
    if class_name in LONG_PLAY_MEDIA.values():
        self.finish_long_play()
    if class_name == "Book":
        self.calculate_reading_stats()
    logger.info(
        f"[scrobbling] stopped",
        extra={
            "scrobble_id": self.id,
            "media_id": self.media_obj.id,
            "media_type": self.media_type,
            "source": self.source,
        },
    )
  1130. def pause(self) -> None:
  1131. if self.is_paused:
  1132. logger.warning(f"{self.id} - already paused - {self.source}")
  1133. return
  1134. self.is_paused = True
  1135. self.save(update_fields=["is_paused"])
  1136. logger.info(
  1137. f"[scrobbling] paused",
  1138. extra={
  1139. "scrobble_id": self.id,
  1140. "media_type": self.media_type,
  1141. "source": self.source,
  1142. },
  1143. )
  1144. def resume(self) -> None:
  1145. if self.is_paused or not self.in_progress:
  1146. self.is_paused = False
  1147. self.in_progress = True
  1148. self.save(update_fields=["is_paused", "in_progress"])
  1149. logger.info(
  1150. f"[scrobbling] resumed",
  1151. extra={
  1152. "scrobble_id": self.id,
  1153. "media_type": self.media_type,
  1154. "source": self.source,
  1155. },
  1156. )
def cancel(self) -> None:
    """Discard this scrobble entirely by deleting the row.

    Unlike stop(), no completion bookkeeping is performed.
    """
    self.delete()
  1159. def update_ticks(self, data) -> None:
  1160. self.playback_position_seconds = data.get("playback_position_seconds")
  1161. self.save(update_fields=["playback_position_seconds"])
  1162. def finish_long_play(self):
  1163. seconds_elapsed = (timezone.now() - self.timestamp).seconds
  1164. past_seconds = 0
  1165. # Set our playback seconds, and calc long play seconds
  1166. self.playback_position_seconds = seconds_elapsed
  1167. if self.previous:
  1168. past_seconds = self.previous.long_play_seconds or 0
  1169. self.long_play_seconds = past_seconds + seconds_elapsed
  1170. # Long play scrobbles are always finished when we say they are
  1171. self.played_to_completion = True
  1172. self.save(
  1173. update_fields=[
  1174. "playback_position_seconds",
  1175. "played_to_completion",
  1176. "long_play_seconds",
  1177. ]
  1178. )
  1179. logger.info(
  1180. f"[scrobbling] finishing long play",
  1181. extra={
  1182. "scrobble_id": self.id,
  1183. },
  1184. )
  1185. @property
  1186. def beyond_completion_percent(self) -> bool:
  1187. """Returns true if our media is beyond our completion percent, unless
  1188. our type is geolocation in which case we always return false
  1189. """
  1190. beyond_completion = (
  1191. self.percent_played >= self.media_obj.COMPLETION_PERCENT
  1192. )
  1193. if self.media_type == "GeoLocation":
  1194. logger.info(
  1195. f"[scrobbling] locations are ONLY completed when new one is created",
  1196. extra={
  1197. "scrobble_id": self.id,
  1198. "media_type": self.media_type,
  1199. "beyond_completion": beyond_completion,
  1200. },
  1201. )
  1202. beyond_completion = False
  1203. return beyond_completion
  1204. def calculate_reading_stats(self, commit=True):
  1205. # --- Sort safely by numeric page_number ---
  1206. def safe_page_number(entry):
  1207. try:
  1208. return int(getattr("page_number", entry), 0)
  1209. except (ValueError, TypeError):
  1210. return float("inf") # push invalid entries to the end
  1211. page_data = self.log.get("page_data")
  1212. if not page_data:
  1213. logger.warning("No page data found to calculate")
  1214. return
  1215. if isinstance(page_data, dict):
  1216. logger.warning("Page data is dict, migrate koreader data")
  1217. return
  1218. page_data.sort(key=safe_page_number)
  1219. # --- Extract valid numeric page numbers ---
  1220. valid_pages = []
  1221. for page in page_data:
  1222. try:
  1223. valid_pages.append(int(page["page_number"]))
  1224. except (ValueError, TypeError):
  1225. continue
  1226. # --- Compute stats ---
  1227. if valid_pages:
  1228. self.log["page_start"] = min(valid_pages)
  1229. self.log["page_end"] = max(valid_pages)
  1230. self.log["pages_read"] = len(set(valid_pages))
  1231. if commit:
  1232. self.save(update_fields=["log"])