# scrobbles/models.py
# Standard library
import calendar
import datetime
import json
import logging
from collections import defaultdict
from typing import Optional
from uuid import uuid4
from zoneinfo import ZoneInfo

# Third-party
import pendulum
import pytz

# Project apps — one app per trackable media type
from beers.models import Beer
from boardgames.models import BoardGame
from books.koreader import process_koreader_sqlite_file
from books.models import Book, Paper, BookPageLogData, BookLogData
from bricksets.models import BrickSet
from dataclass_wizard.errors import ParseError
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.files import File
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django_extensions.db.models import TimeStampedModel
from foods.models import Food, FoodLogData
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFit
from lifeevents.models import LifeEvent
from locations.models import GeoLocation
from moods.models import Mood
from music.models import Artist, Track
from podcasts.models import PodcastEpisode
from profiles.utils import (
    end_of_day,
    end_of_month,
    end_of_week,
    fix_profile_historic_timezones,
    start_of_day,
    start_of_month,
    start_of_week,
)
from puzzles.models import Puzzle
from scrobbles import dataclasses as logdata
from scrobbles.constants import LONG_PLAY_MEDIA, MEDIA_END_PADDING_SECONDS
from scrobbles.importers.lastfm import LastFM
from scrobbles.notifications import ScrobbleNtfyNotification
from scrobbles.stats import build_charts
from scrobbles.utils import get_file_md5_hash, media_class_to_foreign_key
from sports.models import SportEvent
from tasks.models import Task
from trails.models import Trail
from videogames import retroarch
from videogames.models import VideoGame
from videos.models import Series, Video
from webpages.models import WebPage
logger = logging.getLogger(__name__)

User = get_user_model()

# Shorthand kwargs for optional model fields.
BNULL = {"blank": True, "null": True}

# How many recent GPS points to keep for movement history; overridable via
# settings.POINTS_FOR_MOVEMENT_HISTORY.
POINTS_FOR_MOVEMENT_HISTORY = int(
    getattr(settings, "POINTS_FOR_MOVEMENT_HISTORY", 3)
)
  61. class BaseFileImportMixin(TimeStampedModel):
  62. user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
  63. uuid = models.UUIDField(editable=False, default=uuid4)
  64. processing_started = models.DateTimeField(**BNULL)
  65. processed_finished = models.DateTimeField(**BNULL)
  66. process_log = models.TextField(**BNULL)
  67. process_count = models.IntegerField(**BNULL)
  68. class Meta:
  69. abstract = True
  70. def __str__(self):
  71. return f"{self.import_type} import on {self.human_start}"
  72. @property
  73. def human_start(self):
  74. start = "Unknown"
  75. if self.processing_started:
  76. start = self.processing_started.strftime("%B %d, %Y at %H:%M")
  77. return start
  78. @property
  79. def import_type(self) -> str:
  80. return "Unknown Import Source"
  81. def process(self, force=False):
  82. logger.warning("Process not implemented")
  83. def undo(self, dryrun=False):
  84. """Accepts the log from a scrobble import and removes the scrobbles"""
  85. from scrobbles.models import Scrobble
  86. if not self.process_log:
  87. logger.warning("No lines in process log found to undo")
  88. return
  89. for line in self.process_log.split("\n"):
  90. scrobble_id = line.split("\t")[0]
  91. scrobble = Scrobble.objects.filter(id=scrobble_id).first()
  92. if not scrobble:
  93. logger.warning(
  94. f"Could not find scrobble {scrobble_id} to undo"
  95. )
  96. continue
  97. logger.info(f"Removing scrobble {scrobble_id}")
  98. if not dryrun:
  99. scrobble.delete()
  100. self.processed_finished = None
  101. self.processing_started = None
  102. self.process_count = None
  103. self.process_log = ""
  104. self.save(
  105. update_fields=[
  106. "processed_finished",
  107. "processing_started",
  108. "process_log",
  109. "process_count",
  110. ]
  111. )
  112. def scrobbles(self) -> models.QuerySet:
  113. scrobble_ids = []
  114. if self.process_log:
  115. for line in self.process_log.split("\n"):
  116. sid = line.split("\t")[0]
  117. if sid:
  118. scrobble_ids.append(sid)
  119. return Scrobble.objects.filter(id__in=scrobble_ids)
  120. def mark_started(self):
  121. self.processing_started = timezone.now()
  122. self.save(update_fields=["processing_started"])
  123. def mark_finished(self):
  124. self.processed_finished = timezone.now()
  125. self.save(update_fields=["processed_finished"])
  126. def record_log(self, scrobbles):
  127. self.process_log = ""
  128. if not scrobbles:
  129. self.process_count = 0
  130. self.save(update_fields=["process_log", "process_count"])
  131. return
  132. for count, scrobble in enumerate(scrobbles):
  133. scrobble_str = (
  134. f"{scrobble.id}\t{scrobble.timestamp}\t{scrobble.media_obj}"
  135. )
  136. log_line = f"{scrobble_str}"
  137. if count > 0:
  138. log_line = "\n" + log_line
  139. self.process_log += log_line
  140. self.process_count = len(scrobbles)
  141. self.save(update_fields=["process_log", "process_count"])
  142. @property
  143. def upload_file_path(self):
  144. raise NotImplementedError
  145. class KoReaderImport(BaseFileImportMixin):
  146. class Meta:
  147. verbose_name = "KOReader Import"
  148. @property
  149. def import_type(self) -> str:
  150. return "KOReader"
  151. def get_absolute_url(self):
  152. return reverse(
  153. "scrobbles:koreader-import-detail", kwargs={"slug": self.uuid}
  154. )
  155. def get_path(instance, filename):
  156. extension = filename.split(".")[-1]
  157. uuid = instance.uuid
  158. return f"koreader-uploads/{uuid}.{extension}"
  159. @property
  160. def upload_file_path(self) -> str:
  161. if getattr(settings, "USE_S3_STORAGE"):
  162. path = self.sqlite_file.url
  163. else:
  164. path = self.sqlite_file.path
  165. return path
  166. sqlite_file = models.FileField(upload_to=get_path, **BNULL)
  167. def save_sqlite_file_to_self(self, file_path):
  168. with open(file_path, "rb") as f:
  169. self.sqlite_file.save(
  170. f"{self.user_id}-koreader-statistics.sqlite",
  171. File(f),
  172. save=True,
  173. )
  174. def file_md5_hash(self) -> str:
  175. if self.sqlite_file:
  176. return get_file_md5_hash(self.sqlite_file.path)
  177. return ""
  178. def process(self, force=False):
  179. if self.user.id == 1:
  180. fix_profile_historic_timezones(self.user.profile)
  181. if self.processed_finished and not force:
  182. logger.info(
  183. f"{self} already processed on {self.processed_finished}"
  184. )
  185. return
  186. self.mark_started()
  187. scrobbles = process_koreader_sqlite_file(
  188. self.upload_file_path, self.user.id
  189. )
  190. self.record_log(scrobbles)
  191. self.mark_finished()
  192. class AudioScrobblerTSVImport(BaseFileImportMixin):
  193. class Meta:
  194. verbose_name = "AudioScrobbler TSV Import"
  195. @property
  196. def import_type(self) -> str:
  197. return "AudiosScrobbler"
  198. def get_absolute_url(self):
  199. return reverse(
  200. "scrobbles:tsv-import-detail", kwargs={"slug": self.uuid}
  201. )
  202. def get_path(instance, filename):
  203. extension = filename.split(".")[-1]
  204. uuid = instance.uuid
  205. return f"audioscrobbler-uploads/{uuid}.{extension}"
  206. @property
  207. def upload_file_path(self):
  208. if getattr(settings, "USE_S3_STORAGE"):
  209. path = self.tsv_file.url
  210. else:
  211. path = self.tsv_file.path
  212. return path
  213. tsv_file = models.FileField(upload_to=get_path, **BNULL)
  214. def process(self, force=False):
  215. from scrobbles.importers.tsv import import_audioscrobbler_tsv_file
  216. if self.user.id == 1:
  217. fix_profile_historic_timezones(self.user.profile)
  218. if self.processed_finished and not force:
  219. logger.info(
  220. f"{self} already processed on {self.processed_finished}"
  221. )
  222. return
  223. self.mark_started()
  224. scrobbles = import_audioscrobbler_tsv_file(
  225. self.upload_file_path, self.user.id
  226. )
  227. self.record_log(scrobbles)
  228. self.mark_finished()
  229. class LastFmImport(BaseFileImportMixin):
  230. class Meta:
  231. verbose_name = "Last.FM Import"
  232. @property
  233. def import_type(self) -> str:
  234. return "LastFM"
  235. def get_absolute_url(self):
  236. return reverse(
  237. "scrobbles:lastfm-import-detail", kwargs={"slug": self.uuid}
  238. )
  239. def process(self, import_all=False):
  240. """Import scrobbles found on LastFM"""
  241. if self.user.id == 1:
  242. fix_profile_historic_timezones(self.user.profile)
  243. if self.processed_finished:
  244. logger.info(
  245. f"{self} already processed on {self.processed_finished}"
  246. )
  247. return
  248. last_import = None
  249. if not import_all:
  250. try:
  251. last_import = LastFmImport.objects.exclude(id=self.id).last()
  252. except:
  253. pass
  254. if not import_all and not last_import:
  255. logger.warn(
  256. "No previous import, to import all Last.fm scrobbles, pass import_all=True"
  257. )
  258. return
  259. lastfm = LastFM(self.user)
  260. last_processed = None
  261. if last_import:
  262. last_processed = last_import.processed_finished
  263. self.mark_started()
  264. scrobbles = lastfm.import_from_lastfm(last_processed)
  265. self.record_log(scrobbles)
  266. self.mark_finished()
  267. class RetroarchImport(BaseFileImportMixin):
  268. class Meta:
  269. verbose_name = "Retroarch Import"
  270. @property
  271. def import_type(self) -> str:
  272. return "Retroarch"
  273. def get_absolute_url(self):
  274. return reverse(
  275. "scrobbles:retroarch-import-detail", kwargs={"slug": self.uuid}
  276. )
  277. def process(self, import_all=False, force=False):
  278. """Import scrobbles found on Retroarch"""
  279. if self.user.id == 1:
  280. fix_profile_historic_timezones(self.user.profile)
  281. if self.processed_finished and not force:
  282. logger.info(
  283. f"{self} already processed on {self.processed_finished}"
  284. )
  285. return
  286. if force:
  287. logger.info(f"You told me to force import from Retroarch")
  288. if not self.user.profile.retroarch_path:
  289. logger.info(
  290. "Tying to import Retroarch logs, but user has no retroarch_path configured"
  291. )
  292. self.mark_started()
  293. scrobbles = retroarch.import_retroarch_lrtl_files(
  294. self.user.profile.retroarch_path,
  295. self.user.id,
  296. )
  297. self.record_log(scrobbles)
  298. self.mark_finished()
  299. class ChartRecord(TimeStampedModel):
  300. """Sort of like a materialized view for what we could dynamically generate,
  301. but would kill the DB as it gets larger. Collects time-based records
  302. generated by a cron-like archival job
  303. 1972 by Josh Rouse - #3 in 2023, January
  304. """
  305. user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
  306. rank = models.IntegerField(db_index=True)
  307. count = models.IntegerField(default=0)
  308. year = models.IntegerField(**BNULL)
  309. month = models.IntegerField(**BNULL)
  310. week = models.IntegerField(**BNULL)
  311. day = models.IntegerField(**BNULL)
  312. video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
  313. series = models.ForeignKey(Series, on_delete=models.DO_NOTHING, **BNULL)
  314. artist = models.ForeignKey(Artist, on_delete=models.DO_NOTHING, **BNULL)
  315. track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
  316. period_start = models.DateTimeField(**BNULL)
  317. period_end = models.DateTimeField(**BNULL)
  318. def save(self, *args, **kwargs):
  319. profile = self.user.profile
  320. if self.week:
  321. # set start and end to start and end of week
  322. period = datetime.date.fromisocalendar(self.year, self.week, 1)
  323. self.period_start = start_of_week(period, profile)
  324. self.period_start = end_of_week(period, profile)
  325. if self.day:
  326. period = datetime.datetime(self.year, self.month, self.day)
  327. self.period_start = start_of_day(period, profile)
  328. self.period_end = end_of_day(period, profile)
  329. if self.month and not self.day:
  330. period = datetime.datetime(self.year, self.month, 1)
  331. self.period_start = start_of_month(period, profile)
  332. self.period_end = end_of_month(period, profile)
  333. super(ChartRecord, self).save(*args, **kwargs)
  334. @property
  335. def media_obj(self):
  336. media_obj = None
  337. if self.video:
  338. media_obj = self.video
  339. if self.track:
  340. media_obj = self.track
  341. if self.artist:
  342. media_obj = self.artist
  343. return media_obj
  344. @property
  345. def month_str(self) -> str:
  346. month_str = ""
  347. if self.month:
  348. month_str = calendar.month_name[self.month]
  349. return month_str
  350. @property
  351. def day_str(self) -> str:
  352. day_str = ""
  353. if self.day:
  354. day_str = str(self.day)
  355. return day_str
  356. @property
  357. def week_str(self) -> str:
  358. week_str = ""
  359. if self.week:
  360. week_str = str(self.week)
  361. return "Week " + week_str
  362. @property
  363. def period(self) -> str:
  364. period = str(self.year)
  365. if self.month:
  366. period = " ".join([self.month_str, period])
  367. if self.week:
  368. period = " ".join([self.week_str, period])
  369. if self.day:
  370. period = " ".join([self.day_str, period])
  371. return period
  372. @property
  373. def period_type(self) -> str:
  374. period = "year"
  375. if self.month:
  376. period = "month"
  377. if self.week:
  378. period = "week"
  379. if self.day:
  380. period = "day"
  381. return period
  382. def __str__(self):
  383. title = f"#{self.rank} in {self.period}"
  384. if self.day or self.week:
  385. title = f"#{self.rank} on {self.period}"
  386. return title
  387. def link(self):
  388. get_params = f"?date={self.year}"
  389. if self.week:
  390. get_params = get_params = get_params + f"-W{self.week}"
  391. if self.month:
  392. get_params = get_params = get_params + f"-{self.month}"
  393. if self.day:
  394. get_params = get_params = get_params + f"-{self.day}"
  395. if self.artist:
  396. get_params = get_params + "&media=Artist"
  397. return reverse("scrobbles:charts-home") + get_params
  398. @classmethod
  399. def build(cls, user, **kwargs):
  400. build_charts(user=user, **kwargs)
  401. @classmethod
  402. def for_year(cls, user, year):
  403. return cls.objects.filter(year=year, user=user)
  404. @classmethod
  405. def for_month(cls, user, year, month):
  406. return cls.objects.filter(year=year, month=month, user=user)
  407. @classmethod
  408. def for_day(cls, user, year, day, month):
  409. return cls.objects.filter(year=year, month=month, day=day, user=user)
  410. @classmethod
  411. def for_week(cls, user, year, week):
  412. return cls.objects.filter(year=year, week=week, user=user)
  413. class Scrobble(TimeStampedModel):
  414. """A scrobble tracks played media items by a user."""
  415. class MediaType(models.TextChoices):
  416. """Enum mapping a media model type to a string"""
  417. VIDEO = "Video", "Video"
  418. TRACK = "Track", "Track"
  419. PODCAST_EPISODE = "PodcastEpisode", "Podcast episode"
  420. SPORT_EVENT = "SportEvent", "Sport event"
  421. BOOK = "Book", "Book"
  422. PAPER = "Paper", "Paper"
  423. VIDEO_GAME = "VideoGame", "Video game"
  424. BOARD_GAME = "BoardGame", "Board game"
  425. GEO_LOCATION = "GeoLocation", "GeoLocation"
  426. TRAIL = "Trail", "Trail"
  427. BEER = "Beer", "Beer"
  428. PUZZLE = "Puzzle", "Puzzle"
  429. FOOD = "Food", "Food"
  430. TASK = "Task", "Task"
  431. WEBPAGE = "WebPage", "Web Page"
  432. LIFE_EVENT = "LifeEvent", "Life event"
  433. MOOD = "Mood", "Mood"
  434. BRICKSET = "BrickSet", "Brick set"
  435. @classmethod
  436. def list(cls):
  437. return list(map(lambda c: c.value, cls))
  438. uuid = models.UUIDField(editable=False, **BNULL)
  439. video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
  440. track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
  441. podcast_episode = models.ForeignKey(
  442. PodcastEpisode, on_delete=models.DO_NOTHING, **BNULL
  443. )
  444. sport_event = models.ForeignKey(
  445. SportEvent, on_delete=models.DO_NOTHING, **BNULL
  446. )
  447. book = models.ForeignKey(Book, on_delete=models.DO_NOTHING, **BNULL)
  448. paper = models.ForeignKey(Paper, on_delete=models.DO_NOTHING, **BNULL)
  449. video_game = models.ForeignKey(
  450. VideoGame, on_delete=models.DO_NOTHING, **BNULL
  451. )
  452. board_game = models.ForeignKey(
  453. BoardGame, on_delete=models.DO_NOTHING, **BNULL
  454. )
  455. geo_location = models.ForeignKey(
  456. GeoLocation, on_delete=models.DO_NOTHING, **BNULL
  457. )
  458. beer = models.ForeignKey(Beer, on_delete=models.DO_NOTHING, **BNULL)
  459. puzzle = models.ForeignKey(Puzzle, on_delete=models.DO_NOTHING, **BNULL)
  460. food = models.ForeignKey(Food, on_delete=models.DO_NOTHING, **BNULL)
  461. trail = models.ForeignKey(Trail, on_delete=models.DO_NOTHING, **BNULL)
  462. task = models.ForeignKey(Task, on_delete=models.DO_NOTHING, **BNULL)
  463. web_page = models.ForeignKey(WebPage, on_delete=models.DO_NOTHING, **BNULL)
  464. life_event = models.ForeignKey(
  465. LifeEvent, on_delete=models.DO_NOTHING, **BNULL
  466. )
  467. mood = models.ForeignKey(Mood, on_delete=models.DO_NOTHING, **BNULL)
  468. brick_set = models.ForeignKey(
  469. BrickSet, on_delete=models.DO_NOTHING, **BNULL
  470. )
  471. media_type = models.CharField(
  472. max_length=14, choices=MediaType.choices, default=MediaType.VIDEO
  473. )
  474. user = models.ForeignKey(
  475. User, blank=True, null=True, on_delete=models.DO_NOTHING
  476. )
  477. # Time keeping
  478. timestamp = models.DateTimeField(**BNULL)
  479. stop_timestamp = models.DateTimeField(**BNULL)
  480. playback_position_seconds = models.IntegerField(**BNULL)
  481. # Status indicators
  482. is_paused = models.BooleanField(default=False)
  483. played_to_completion = models.BooleanField(default=False)
  484. in_progress = models.BooleanField(default=True)
  485. # Metadata
  486. source = models.CharField(max_length=255, **BNULL)
  487. log = models.JSONField(
  488. **BNULL,
  489. default=dict,
  490. encoder=logdata.ScrobbleLogDataEncoder,
  491. decoder=logdata.ScrobbleLogDataDecoder,
  492. )
  493. timezone = models.CharField(max_length=50, **BNULL)
  494. # Fields for keeping track of video game data
  495. videogame_save_data = models.FileField(
  496. upload_to="scrobbles/videogame_save_data/", **BNULL
  497. )
  498. gpx_file = models.FileField(upload_to="scrobbles/gpx_file/", **BNULL)
  499. screenshot = models.ImageField(
  500. upload_to="scrobbles/videogame_screenshot/", **BNULL
  501. )
  502. screenshot_small = ImageSpecField(
  503. source="screenshot",
  504. processors=[ResizeToFit(100, 100)],
  505. format="JPEG",
  506. options={"quality": 60},
  507. )
  508. screenshot_medium = ImageSpecField(
  509. source="screenshot",
  510. processors=[ResizeToFit(300, 300)],
  511. format="JPEG",
  512. options={"quality": 75},
  513. )
  514. long_play_seconds = models.BigIntegerField(**BNULL)
  515. long_play_complete = models.BooleanField(**BNULL)
  516. @classmethod
  517. def for_year(cls, user, year):
  518. return cls.objects.filter(timestamp__year=year, user=user).order_by(
  519. "-timestamp"
  520. )
  521. @classmethod
  522. def for_month(cls, user, year, month):
  523. return cls.objects.filter(
  524. timestamp__year=year, timestamp__month=month, user=user
  525. ).order_by("-timestamp")
  526. @classmethod
  527. def for_day(cls, user, year, month, day):
  528. return cls.objects.filter(
  529. timestamp__year=year,
  530. timestamp__month=month,
  531. timestamp__day=day,
  532. user=user,
  533. ).order_by("-timestamp")
  534. @classmethod
  535. def for_week(cls, user, year, week):
  536. return cls.objects.filter(
  537. timestamp__year=year, timestamp__week=week, user=user
  538. ).order_by("-timestamp")
  539. @classmethod
  540. def as_dict_by_type(cls, scrobble_qs: models.QuerySet) -> dict:
  541. scrobbles_by_type = defaultdict(list)
  542. for scrobble in scrobble_qs:
  543. scrobbles_by_type[scrobble.media_type].append(scrobble)
  544. if not scrobbles_by_type.get(scrobble.media_type + "_count"):
  545. scrobbles_by_type[scrobble.media_type + "_count"] = 0
  546. scrobbles_by_type[scrobble.media_type + "_count"] += 1
  547. if not scrobbles_by_type.get(scrobble.media_type + "_time"):
  548. scrobbles_by_type[scrobble.media_type + "_time"] = 0
  549. scrobbles_by_type[scrobble.media_type + "_time"] += int(
  550. (scrobble.elapsed_time)
  551. )
  552. # Remove any locations without titles
  553. if "GeoLocation" in scrobbles_by_type.keys():
  554. for loc_scrobble in scrobbles_by_type["GeoLocation"]:
  555. if not loc_scrobble.media_obj.title:
  556. scrobbles_by_type["GeoLocation"].remove(loc_scrobble)
  557. scrobbles_by_type["GeoLocation_count"] -= 1
  558. return scrobbles_by_type
  559. @classmethod
  560. def in_progress_for_user(cls, user_id: int) -> models.QuerySet:
  561. return cls.objects.filter(
  562. user=user_id,
  563. in_progress=True,
  564. played_to_completion=False,
  565. is_paused=False,
  566. )
  567. @property
  568. def last_serial_scrobble(self) -> Optional["Scrobble"]:
  569. from scrobbles.models import Scrobble
  570. if self.logdata and self.logdata.serial_scrobble_id:
  571. return Scrobble.objects.filter(
  572. id=self.logdata.serial_scrobble_id
  573. ).first()
  574. @property
  575. def finish_url(self) -> str:
  576. return reverse("scrobbles:finish", kwargs={"uuid": self.uuid})
  577. def save(self, *args, **kwargs):
  578. if not self.uuid:
  579. self.uuid = uuid4()
  580. if not self.timezone:
  581. timezone = settings.TIME_ZONE
  582. if self.user and self.user.profile:
  583. timezone = self.user.profile.timezone
  584. self.timezone = timezone
  585. # Microseconds mess up Django's filtering, and we don't need be that specific
  586. if self.timestamp:
  587. self.timestamp = self.timestamp.replace(microsecond=0)
  588. if self.media_obj:
  589. self.media_type = self.MediaType(self.media_obj.__class__.__name__)
  590. return super(Scrobble, self).save(*args, **kwargs)
  591. def get_absolute_url(self):
  592. if not self.uuid:
  593. self.uuid = uuid4()
  594. self.save()
  595. return reverse("scrobbles:detail", kwargs={"uuid": self.uuid})
  596. def push_to_archivebox(self):
  597. pushable_media = hasattr(
  598. self.media_obj, "push_to_archivebox"
  599. ) and callable(self.media_obj.push_to_archivebox)
  600. if pushable_media and self.user.profile.archivebox_url:
  601. try:
  602. self.media_obj.push_to_archivebox(
  603. url=self.user.profile.archivebox_url,
  604. username=self.user.profile.archivebox_username,
  605. password=self.user.profile.archivebox_password,
  606. )
  607. except Exception:
  608. logger.info(
  609. "Failed to push URL to archivebox",
  610. extra={
  611. "archivebox_url": self.user.profile.archivebox_url,
  612. "archivebox_username": self.user.profile.archivebox_username,
  613. },
  614. )
  615. @property
  616. def logdata(self) -> Optional[logdata.BaseLogData]:
  617. if self.media_obj:
  618. logdata_cls = self.media_obj.logdata_cls
  619. else:
  620. logdata_cls = logdata.BaseLogData
  621. log_dict = self.log
  622. if isinstance(self.log, str):
  623. # There's nothing stopping django from saving a string in a JSONField :(
  624. logger.warning(
  625. "[scrobbles] Received string in JSON data in log",
  626. extra={"log": self.log},
  627. )
  628. log_dict = json.loads(self.log)
  629. if not log_dict:
  630. log_dict = {}
  631. try:
  632. return logdata_cls(**log_dict)
  633. except ParseError as e:
  634. logger.warning(
  635. "Could not parse log data",
  636. extra={
  637. "log_dict": log_dict,
  638. "scrobble_id": self.id,
  639. "error": e,
  640. },
  641. )
  642. return logdata_cls()
  643. except TypeError as e:
  644. return logdata_cls()
  645. def redirect_url(self, user_id) -> str:
  646. user = User.objects.filter(id=user_id).first()
  647. redirect_url = self.media_obj.get_absolute_url()
  648. if (
  649. self.media_type == self.MediaType.WEBPAGE
  650. and user
  651. and user.profile.redirect_to_webpage
  652. ):
  653. logger.info(f"Redirecting to {self.media_obj.url}")
  654. redirect_url = self.media_obj.url
  655. if (
  656. self.media_type == self.MediaType.VIDEO
  657. and self.media_obj.youtube_id
  658. ):
  659. redirect_url = self.media_obj.youtube_link
  660. return redirect_url
  661. @property
  662. def tzinfo(self):
  663. return ZoneInfo(self.timezone)
  664. @property
  665. def local_timestamp(self):
  666. return timezone.localtime(self.timestamp, timezone=self.tzinfo)
  667. @property
  668. def local_stop_timestamp(self):
  669. if self.stop_timestamp:
  670. return timezone.localtime(
  671. self.stop_timestamp, timezone=self.tzinfo
  672. )
  673. @property
  674. def scrobble_media_key(self) -> str:
  675. return media_class_to_foreign_key(self.media_type) + "_id"
  676. @property
  677. def status(self) -> str:
  678. if self.is_paused:
  679. return "paused"
  680. if self.played_to_completion:
  681. return "finished"
  682. if self.in_progress:
  683. return "in-progress"
  684. return "zombie"
  685. @property
  686. def is_stale(self) -> bool:
  687. """Mark scrobble as stale if it's been more than an hour since it was updated
  688. Effectively, this allows 'resuming' a video scrobble within an hour of starting it.
  689. """
  690. is_stale = False
  691. now = timezone.now()
  692. seconds_since_last_update = (now - self.modified).total_seconds()
  693. if seconds_since_last_update >= self.media_obj.SECONDS_TO_STALE:
  694. is_stale = True
  695. return is_stale
  696. @property
  697. def previous(self) -> "Scrobble":
  698. return (
  699. self.media_obj.scrobble_set.order_by("-timestamp")
  700. .filter(timestamp__lt=self.timestamp)
  701. .first()
  702. )
  703. @property
  704. def next(self) -> "Scrobble":
  705. return (
  706. self.media_obj.scrobble_set.order_by("timestamp")
  707. .filter(timestamp__gt=self.timestamp)
  708. .first()
  709. )
  710. @property
  711. def previous_by_media(self) -> "Scrobble":
  712. return (
  713. Scrobble.objects.filter(
  714. media_type=self.media_type,
  715. user=self.user,
  716. timestamp__lt=self.timestamp,
  717. )
  718. .order_by("-timestamp")
  719. .first()
  720. )
  721. @property
  722. def next_by_media(self) -> "Scrobble":
  723. return (
  724. Scrobble.objects.filter(
  725. media_type=self.media_type,
  726. user=self.user,
  727. timestamp__gt=self.timestamp,
  728. )
  729. .order_by("-timestamp")
  730. .first()
  731. )
  732. @property
  733. def previous_by_user(self) -> "Scrobble":
  734. return (
  735. Scrobble.objects.order_by("-timestamp")
  736. .filter(timestamp__lt=self.timestamp)
  737. .first()
  738. )
  739. @property
  740. def next_by_user(self) -> "Scrobble":
  741. return (
  742. Scrobble.objects.order_by("-timestamp")
  743. .filter(timestamp__gt=self.timestamp)
  744. .first()
  745. )
  746. @property
  747. def session_pages_read(self) -> Optional[int]:
  748. pages_read = 0
  749. if self.log and isinstance(self.log, dict):
  750. pages_read = self.log.get("pages_read", 0)
  751. return pages_read
  752. @property
  753. def is_long_play(self) -> bool:
  754. return self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values()
  755. @property
  756. def elapsed_time(self) -> int | None:
  757. if self.played_to_completion:
  758. if self.playback_position_seconds:
  759. return self.playback_position_seconds
  760. if self.media_obj and self.media_obj.run_time_seconds:
  761. return self.media_obj.run_time_seconds
  762. return (timezone.now() - self.timestamp).seconds
    @property
    def percent_played(self) -> int:
        """Integer percent of the media's run time that has been played.

        Returns 0 when there is no media object; returns 100 when the media
        has no known run time, or when it finished without a recorded playback
        position. For long-play media, seconds from the previous unfinished
        session are added before computing the percentage (so it can exceed 100).
        """
        if not self.media_obj:
            return 0
        # No run time recorded: any amount of play counts as complete.
        if self.media_obj and not self.media_obj.run_time_seconds:
            return 100
        if not self.playback_position_seconds and self.played_to_completion:
            return 100
        playback_seconds = self.playback_position_seconds
        if not playback_seconds:
            # Fall back to wall-clock time elapsed since the scrobble started.
            playback_seconds = self.elapsed_time
        run_time_secs = self.media_obj.run_time_seconds
        percent = int((playback_seconds / run_time_secs) * 100)
        if self.is_long_play:
            # Carry over accumulated seconds from the previous unfinished session.
            long_play_secs = 0
            if self.previous and not self.previous.long_play_complete:
                long_play_secs = self.previous.long_play_seconds or 0
            percent = int(
                ((playback_seconds + long_play_secs) / run_time_secs) * 100
            )
        return percent
  784. @property
  785. def probably_still_in_progress(self) -> bool:
  786. """Add our start time to our media run time to get when we expect to
  787. Audio tracks should be given a second or two of grace, videos should
  788. be given closer to 30 minutes, because the odds of watching it back to
  789. back are very slim.
  790. """
  791. is_in_progress = False
  792. padding_seconds = MEDIA_END_PADDING_SECONDS.get(self.media_type)
  793. if not padding_seconds:
  794. return is_in_progress
  795. if not self.media_obj:
  796. logger.info(
  797. "[scrobbling] scrobble has no media obj",
  798. extra={
  799. "media_id": self.media_obj,
  800. "scrobble_id": self.id,
  801. "media_type": self.media_type,
  802. "probably_still_in_progress": is_in_progress,
  803. },
  804. )
  805. return is_in_progress
  806. if not self.media_obj.run_time_seconds:
  807. logger.info(
  808. "[scrobbling] media has no run time seconds, cannot calculate end",
  809. extra={
  810. "media_id": self.media_obj.id,
  811. "scrobble_id": self.id,
  812. "media_type": self.media_type,
  813. "probably_still_in_progress": is_in_progress,
  814. },
  815. )
  816. return is_in_progress
  817. expected_end = self.timestamp + datetime.timedelta(
  818. seconds=self.media_obj.run_time_seconds
  819. )
  820. expected_end_padded = expected_end + datetime.timedelta(
  821. seconds=padding_seconds
  822. )
  823. # Take our start time, add our media length and an extra 30 min (1800s) is it still in the future? keep going
  824. is_in_progress = expected_end_padded > pendulum.now()
  825. logger.info(
  826. "[scrobbling] checking if we're probably still playing",
  827. extra={
  828. "media_id": self.media_obj.id,
  829. "scrobble_id": self.id,
  830. "media_type": self.media_type,
  831. "probably_still_in_progress": is_in_progress,
  832. },
  833. )
  834. return is_in_progress
  835. @property
  836. def can_be_updated(self) -> bool:
  837. if self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values() and self.source != "readcomicsonline.ru":
  838. logger.info(
  839. "[scrobbling] cannot be updated, long play media",
  840. extra={
  841. "media_id": self.media_obj.id,
  842. "scrobble_id": self.id,
  843. "media_type": self.media_type,
  844. },
  845. )
  846. return False
  847. if self.percent_played >= 100 and not self.probably_still_in_progress:
  848. logger.info(
  849. "[scrobbling] cannot be updated, existing scrobble is 100% played",
  850. extra={
  851. "media_id": self.media_obj.id,
  852. "scrobble_id": self.id,
  853. "media_type": self.media_type,
  854. },
  855. )
  856. return False
  857. if self.is_stale:
  858. logger.info(
  859. "[scrobbling] cannot be udpated, stale",
  860. extra={
  861. "media_id": self.media_obj.id,
  862. "scrobble_id": self.id,
  863. "media_type": self.media_type,
  864. },
  865. )
  866. return False
  867. logger.info(
  868. "[scrobbling] can be updated",
  869. extra={
  870. "media_id": self.media_obj.id,
  871. "scrobble_id": self.id,
  872. "media_type": self.media_type,
  873. },
  874. )
  875. return True
  876. @classmethod
  877. def by_date(cls, media_type: str = "Track"):
  878. cls.objects.filter(media_type=media_type).values(
  879. "timestamp__date"
  880. ).annotate(count=models.Count("id")).values(
  881. "timestamp__date", "count"
  882. ).order_by(
  883. "-count",
  884. )
  885. @property
  886. def media_obj(self):
  887. media_obj = None
  888. if self.video:
  889. media_obj = self.video
  890. if self.track:
  891. media_obj = self.track
  892. if self.podcast_episode:
  893. media_obj = self.podcast_episode
  894. if self.sport_event:
  895. media_obj = self.sport_event
  896. if self.book:
  897. media_obj = self.book
  898. if self.video_game:
  899. media_obj = self.video_game
  900. if self.board_game:
  901. media_obj = self.board_game
  902. if self.geo_location:
  903. media_obj = self.geo_location
  904. if self.web_page:
  905. media_obj = self.web_page
  906. if self.life_event:
  907. media_obj = self.life_event
  908. if self.mood:
  909. media_obj = self.mood
  910. if self.brick_set:
  911. media_obj = self.brick_set
  912. if self.trail:
  913. media_obj = self.trail
  914. if self.beer:
  915. media_obj = self.beer
  916. if self.puzzle:
  917. media_obj = self.puzzle
  918. if self.task:
  919. media_obj = self.task
  920. if self.food:
  921. media_obj = self.food
  922. return media_obj
  923. def __str__(self):
  924. return f"Scrobble of {self.media_obj} ({self.timestamp})"
  925. def calc_reading_duration(self) -> int:
  926. duration = 0
  927. if self.logdata.page_data:
  928. for k, v in self.logdata.page_data.items():
  929. duration += v.get("duration")
  930. return duration
  931. def calc_pages_read(self) -> int:
  932. pages_read = 0
  933. if self.logdata.page_data:
  934. pages = [int(k) for k in self.logdata.page_data.keys()]
  935. pages.sort()
  936. if len(pages) == 1:
  937. pages_read = 1
  938. elif len(pages) >= 2:
  939. pages_read += pages[-1] - pages[0]
  940. else:
  941. pages_read = pages[-1] - pages[0]
  942. return pages_read
  943. @property
  944. def last_page_read(self) -> int:
  945. last_page = 0
  946. if self.logdata.page_data:
  947. pages = [int(k) for k in self.logdata.page_data.keys()]
  948. pages.sort()
  949. last_page = pages[-1]
  950. return last_page
  951. @property
  952. def get_media_source_url(self) -> str:
  953. url = ""
  954. if self.media_type == "Website":
  955. url = self.media_obj.url
  956. if self.media_type == "Task":
  957. url = self.media_obj.source_url_for_user(self.user)
  958. return url
    @classmethod
    def create_or_update(
        cls, media, user_id: int, scrobble_data: dict, **kwargs
    ) -> "Scrobble":
        """Create a new scrobble for ``media`` or update the user's latest
        scrobble of it when that one is still open for updates.

        Recognized kwargs:
          - skip_in_progress_check: bypass the update path and force creation
          - read_log_page: page number for book reading-session page logging

        GeoLocations are delegated wholesale to create_or_update_location.
        """
        key = media_class_to_foreign_key(media.__class__.__name__)
        media_query = models.Q(**{key: media})
        scrobble_data[key + "_id"] = media.id
        skip_in_progress_check = kwargs.get("skip_in_progress_check", False)
        read_log_page = kwargs.get("read_log_page", None)
        # Find our last scrobble of this media item (track, video, etc)
        scrobble = (
            cls.objects.filter(
                media_query,
                user_id=user_id,
            )
            .order_by("-timestamp")
            .first()
        )
        source = scrobble_data.get("source", "Vrobbler")
        mtype = media.__class__.__name__
        # GeoLocations are a special case scrobble
        if mtype == cls.MediaType.GEO_LOCATION:
            logger.warning(
                f"[create_or_update] geoloc requires create_or_update_location"
            )
            scrobble = cls.create_or_update_location(
                media, scrobble_data, user_id
            )
            return scrobble
        if not skip_in_progress_check or read_log_page:
            logger.info(
                f"[create_or_update] check for existing scrobble to update ",
                extra={
                    "scrobble_id": scrobble.id if scrobble else None,
                    "media_type": mtype,
                    "media_id": media.id,
                    "scrobble_data": scrobble_data,
                },
            )
            # Clients send "status"; we store it as "playback_status".
            scrobble_data["playback_status"] = scrobble_data.pop(
                "status", None
            )
            # If it's marked as stopped, send it through our update mechanism, which will complete it
            if scrobble and (
                scrobble.can_be_updated
                or (read_log_page and scrobble.can_be_updated)
                or scrobble_data["playback_status"] == "stopped"
            ):
                if read_log_page:
                    # Close any still-open page entries, then append a new
                    # entry for the page we just turned to.
                    page_list = scrobble.log.get("page_data", [])
                    if page_list:
                        for page in page_list:
                            if not page.get("end_ts", None):
                                page["end_ts"] = int(timezone.now().timestamp())
                                page["duration"] = page["end_ts"] - page.get("start_ts")
                    page_list.append(
                        BookPageLogData(
                            page_number=read_log_page,
                            start_ts=int(timezone.now().timestamp())
                        )
                    )
                    scrobble.log["page_data"] = page_list
                    scrobble.save(update_fields=["log"])
                elif "log" in scrobble_data.keys() and scrobble.log:
                    # Merge incoming log data over the stored log (incoming wins).
                    scrobble_data["log"] = scrobble.log | scrobble_data["log"]
                return scrobble.update(scrobble_data)
            # Discard status before creating
            scrobble_data.pop("playback_status")
        if read_log_page:
            # Fresh reading session: start the page log at the current page.
            scrobble_data["log"] = BookLogData(page_data=[BookPageLogData(page_number=read_log_page, start_ts=int(timezone.now().timestamp()))])
        logger.info(
            f"[scrobbling] creating new scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": mtype,
                "media_id": media.id,
                "source": source,
            },
        )
        # Seed calories from the media record when the client didn't supply any.
        if mtype == cls.MediaType.FOOD and not scrobble_data.get("log", {}).get("calories", None):
            if media.calories:
                scrobble_data["log"] = FoodLogData(calories=media.calories)
        scrobble = cls.create(scrobble_data)
        return scrobble
    @classmethod
    def create_or_update_location(
        cls, location: GeoLocation, scrobble_data: dict, user_id: int
    ) -> "Scrobble":
        """Location is special type, where the current scrobble for a user is always the
        current active scrobble, and we only finish it a move on if we get a new location
        that is far enough (and far enough over the last three past scrobbles) to have
        actually moved.
        """
        key = media_class_to_foreign_key(location.__class__.__name__)
        scrobble_data[key + "_id"] = location.id

        # Latest location scrobble at or before the incoming timestamp.
        scrobble = (
            cls.objects.filter(
                media_type=cls.MediaType.GEO_LOCATION,
                user_id=user_id,
                timestamp__lte=scrobble_data.get("timestamp"),
            )
            .order_by("-timestamp")
            .first()
        )

        logger.info(
            f"[scrobbling] fetching last location scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
            },
        )
        # No prior location scrobble: simply start one.
        if not scrobble:
            logger.info(
                f"[scrobbling] finished - no existing location scrobbles found",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                },
            )
            return cls.create(scrobble_data)
        # Same location record as the active scrobble: nothing to do.
        if scrobble.media_obj == location:
            logger.info(
                f"[scrobbling] finished - same location - not moved",
                extra={
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "media_id": location.id,
                    "scrobble_id": scrobble.id,
                    "scrobble_media_id": scrobble.media_obj.id,
                },
            )
            return scrobble

        has_moved = location.has_moved(scrobble.media_obj)
        logger.info(
            f"[scrobbling] checking - has last location has moved?",
            extra={
                "scrobble_id": scrobble.id,
                "scrobble_media_id": scrobble.media_obj.id,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "has_moved": has_moved,
            },
        )
        # Not far enough from the previous location: keep the active scrobble.
        if not has_moved:
            logger.info(
                f"[scrobbling] finished - not from old location - not moved",
                extra={
                    "scrobble_id": scrobble.id,
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media__id": scrobble.media_obj.id,
                },
            )
            return scrobble
        # Close to an existing *named* location: annotate the log and keep
        # the active scrobble rather than churning locations.
        if existing_locations := location.in_proximity(named=True):
            existing_location = existing_locations.first()
            ts = int(pendulum.now().timestamp())
            scrobble.log[
                ts
            ] = f"Location {location.id} too close to this scrobble"
            scrobble.save(update_fields=["log"])
            logger.info(
                f"[scrobbling] finished - found existing named location",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media_id": existing_location.id,
                },
            )
            return scrobble
        # Genuinely moved: close out the old scrobble and start a new one.
        scrobble.stop(force_finish=True)
        scrobble = cls.create(scrobble_data)
        logger.info(
            f"[scrobbling] finished - created for location",
            extra={
                "scrobble_id": scrobble.id,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
                "media_type": cls.MediaType.GEO_LOCATION,
                "source": scrobble_data.get("source"),
            },
        )
        return scrobble
    def update(self, scrobble_data: dict) -> "Scrobble":
        """Apply incoming scrobble data to this record, dispatching playback
        state changes (stop/pause/resume) before writing the fields.

        Returns self for chaining by create_or_update.
        """
        # Status is a field we get from Mopidy, which refuses to poll us
        playback_status = scrobble_data.pop("playback_status", None)
        logger.info(
            "[update] called",
            extra={
                "scrobble_id": self.id,
                "scrobble_data": scrobble_data,
                "media_type": self.media_type,
                "playback_status": playback_status,
            },
        )
        # Past the completion threshold: force a stop regardless of the
        # status the client reported.
        if self.beyond_completion_percent:
            playback_status = "stopped"
        if playback_status == "stopped":
            self.stop()
        if playback_status == "paused":
            self.pause()
        if playback_status == "resumed":
            self.resume()
        if playback_status != "resumed":
            # Any non-resume update moves the stop timestamp forward,
            # consuming the incoming timestamp if one was sent.
            scrobble_data["stop_timestamp"] = (
                scrobble_data.pop("timestamp", None) or timezone.now()
            )
        # timestamp should be more-or-less immutable
        scrobble_data.pop("timestamp", None)
        update_fields = []
        for key, value in scrobble_data.items():
            setattr(self, key, value)
            update_fields.append(key)
        self.save(update_fields=update_fields)
        return self
  1175. @classmethod
  1176. def create(
  1177. cls,
  1178. scrobble_data: dict,
  1179. ) -> "Scrobble":
  1180. scrobble = cls.objects.create(**scrobble_data)
  1181. ScrobbleNtfyNotification(scrobble).send()
  1182. return scrobble
  1183. def stop(self, timestamp=None, force_finish=False) -> None:
  1184. self.stop_timestamp = timestamp or timezone.now()
  1185. self.played_to_completion = True
  1186. self.in_progress = False
  1187. if not self.playback_position_seconds:
  1188. self.playback_position_seconds = int(
  1189. (self.stop_timestamp - self.timestamp).total_seconds()
  1190. )
  1191. self.save(
  1192. update_fields=[
  1193. "in_progress",
  1194. "played_to_completion",
  1195. "stop_timestamp",
  1196. "playback_position_seconds",
  1197. ]
  1198. )
  1199. class_name = self.media_obj.__class__.__name__
  1200. if class_name in LONG_PLAY_MEDIA.values():
  1201. self.finish_long_play()
  1202. if class_name == "Book":
  1203. self.calculate_reading_stats()
  1204. logger.info(
  1205. f"[scrobbling] stopped",
  1206. extra={
  1207. "scrobble_id": self.id,
  1208. "media_id": self.media_obj.id,
  1209. "media_type": self.media_type,
  1210. "source": self.source,
  1211. },
  1212. )
  1213. def pause(self) -> None:
  1214. if self.is_paused:
  1215. logger.warning(f"{self.id} - already paused - {self.source}")
  1216. return
  1217. self.is_paused = True
  1218. self.save(update_fields=["is_paused"])
  1219. logger.info(
  1220. f"[scrobbling] paused",
  1221. extra={
  1222. "scrobble_id": self.id,
  1223. "media_type": self.media_type,
  1224. "source": self.source,
  1225. },
  1226. )
    def resume(self) -> None:
        """Clear the paused flag and reopen this scrobble as in-progress.

        Only acts when the scrobble is paused or not currently in progress;
        an already-running scrobble is left untouched.
        """
        if self.is_paused or not self.in_progress:
            self.is_paused = False
            self.in_progress = True
            self.save(update_fields=["is_paused", "in_progress"])
            logger.info(
                f"[scrobbling] resumed",
                extra={
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "source": self.source,
                },
            )
  1240. def cancel(self) -> None:
  1241. self.delete()
  1242. def update_ticks(self, data) -> None:
  1243. self.playback_position_seconds = data.get("playback_position_seconds")
  1244. self.save(update_fields=["playback_position_seconds"])
  1245. def finish_long_play(self):
  1246. seconds_elapsed = (timezone.now() - self.timestamp).seconds
  1247. past_seconds = 0
  1248. # Set our playback seconds, and calc long play seconds
  1249. self.playback_position_seconds = seconds_elapsed
  1250. if self.previous:
  1251. past_seconds = self.previous.long_play_seconds or 0
  1252. self.long_play_seconds = past_seconds + seconds_elapsed
  1253. # Long play scrobbles are always finished when we say they are
  1254. self.played_to_completion = True
  1255. self.save(
  1256. update_fields=[
  1257. "playback_position_seconds",
  1258. "played_to_completion",
  1259. "long_play_seconds",
  1260. ]
  1261. )
  1262. logger.info(
  1263. f"[scrobbling] finishing long play",
  1264. extra={
  1265. "scrobble_id": self.id,
  1266. },
  1267. )
  1268. @property
  1269. def beyond_completion_percent(self) -> bool:
  1270. """Returns true if our media is beyond our completion percent, unless
  1271. our type is geolocation in which case we always return false
  1272. """
  1273. beyond_completion = (
  1274. self.percent_played >= self.media_obj.COMPLETION_PERCENT
  1275. )
  1276. if self.media_type == "GeoLocation":
  1277. logger.info(
  1278. f"[scrobbling] locations are ONLY completed when new one is created",
  1279. extra={
  1280. "scrobble_id": self.id,
  1281. "media_type": self.media_type,
  1282. "beyond_completion": beyond_completion,
  1283. },
  1284. )
  1285. beyond_completion = False
  1286. return beyond_completion
  1287. def calculate_reading_stats(self, commit=True):
  1288. # --- Sort safely by numeric page_number ---
  1289. def safe_page_number(entry):
  1290. try:
  1291. return int(getattr("page_number", entry), 0)
  1292. except (ValueError, TypeError):
  1293. return float("inf") # push invalid entries to the end
  1294. page_data = self.log.get("page_data")
  1295. if not page_data:
  1296. logger.warning("No page data found to calculate")
  1297. return
  1298. if isinstance(page_data, dict):
  1299. logger.warning("Page data is dict, migrate koreader data")
  1300. return
  1301. page_data.sort(key=safe_page_number)
  1302. # --- Extract valid numeric page numbers ---
  1303. valid_pages = []
  1304. for page in page_data:
  1305. try:
  1306. valid_pages.append(int(page["page_number"]))
  1307. except (ValueError, TypeError):
  1308. continue
  1309. # --- Compute stats ---
  1310. if valid_pages:
  1311. self.log["page_start"] = min(valid_pages)
  1312. self.log["page_end"] = max(valid_pages)
  1313. self.log["pages_read"] = len(set(valid_pages))
  1314. if commit:
  1315. self.save(update_fields=["log"])