models.py

import pytz
import calendar
import datetime
import logging
from decimal import Decimal
from typing import Iterable, Optional
from uuid import uuid4

import pendulum
from boardgames.models import BoardGame
from books.koreader import process_koreader_sqlite_file
from books.models import Book
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.functional import cached_property
from django_extensions.db.models import TimeStampedModel
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFit
from locations.models import GeoLocation
from music.lastfm import LastFM
from music.models import Artist, Track
from podcasts.models import PodcastEpisode
from profiles.utils import (
    end_of_day,
    end_of_month,
    end_of_week,
    start_of_day,
    start_of_month,
    start_of_week,
)
from scrobbles.constants import LONG_PLAY_MEDIA
from scrobbles.stats import build_charts
from scrobbles.utils import media_class_to_foreign_key
from sports.models import SportEvent
from videogames import retroarch
from videogames.models import VideoGame
from videos.models import Series, Video
from webpages.models import WebPage
from vrobbler.apps.scrobbles.constants import MEDIA_END_PADDING_SECONDS

logger = logging.getLogger(__name__)
User = get_user_model()

BNULL = {"blank": True, "null": True}

POINTS_FOR_MOVEMENT_HISTORY = int(
    getattr(settings, "POINTS_FOR_MOVEMENT_HISTORY", 3)
)


class BaseFileImportMixin(TimeStampedModel):
    user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
    uuid = models.UUIDField(editable=False, default=uuid4)
    processing_started = models.DateTimeField(**BNULL)
    processed_finished = models.DateTimeField(**BNULL)
    process_log = models.TextField(**BNULL)
    process_count = models.IntegerField(**BNULL)

    class Meta:
        abstract = True

    def __str__(self):
        return f"{self.import_type} import on {self.human_start}"

    @property
    def human_start(self):
        start = "Unknown"
        if self.processing_started:
            start = self.processing_started.strftime("%B %d, %Y at %H:%M")
        return start

    @property
    def import_type(self) -> str:
        return "Unknown Import Source"

    def process(self, force=False):
        logger.warning("Process not implemented")

    def undo(self, dryrun=False):
        """Accepts the log from a scrobble import and removes the scrobbles"""
        from scrobbles.models import Scrobble

        if not self.process_log:
            logger.warning("No lines in process log found to undo")
            return

        for line in self.process_log.split("\n"):
            scrobble_id = line.split("\t")[0]
            scrobble = Scrobble.objects.filter(id=scrobble_id).first()
            if not scrobble:
                logger.warning(
                    f"Could not find scrobble {scrobble_id} to undo"
                )
                continue
            logger.info(f"Removing scrobble {scrobble_id}")
            if not dryrun:
                scrobble.delete()

        self.processed_finished = None
        self.processing_started = None
        self.process_count = None
        self.process_log = ""
        self.save(
            update_fields=[
                "processed_finished",
                "processing_started",
                "process_log",
                "process_count",
            ]
        )

    def scrobbles(self) -> models.QuerySet:
        scrobble_ids = []
        if self.process_log:
            for line in self.process_log.split("\n"):
                sid = line.split("\t")[0]
                if sid:
                    scrobble_ids.append(sid)
        return Scrobble.objects.filter(id__in=scrobble_ids)

    def mark_started(self):
        self.processing_started = timezone.now()
        self.save(update_fields=["processing_started"])

    def mark_finished(self):
        self.processed_finished = timezone.now()
        self.save(update_fields=["processed_finished"])

    def record_log(self, scrobbles):
        self.process_log = ""
        if not scrobbles:
            self.process_count = 0
            self.save(update_fields=["process_log", "process_count"])
            return

        for count, scrobble in enumerate(scrobbles):
            scrobble_str = f"{scrobble.id}\t{scrobble.timestamp}\t{scrobble.media_obj.title}"
            log_line = f"{scrobble_str}"
            if count > 0:
                log_line = "\n" + log_line
            self.process_log += log_line
        self.process_count = len(scrobbles)
        self.save(update_fields=["process_log", "process_count"])
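
    # Note on the log format assumed by record_log(), undo() and scrobbles():
    # each line of process_log is tab-separated as
    #   "<scrobble_id>\t<timestamp>\t<media title>"
    # e.g. "42\t2023-01-05 21:14:00+00:00\t1972"
    # Only the first column (the scrobble id) is ever read back out.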

    @property
    def upload_file_path(self):
        raise NotImplementedError


class KoReaderImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "KOReader Import"

    @property
    def import_type(self) -> str:
        return "KOReader"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:koreader-import-detail", kwargs={"slug": self.uuid}
        )

    def get_path(instance, filename):
        extension = filename.split(".")[-1]
        uuid = instance.uuid
        return f"koreader-uploads/{uuid}.{extension}"

    @property
    def upload_file_path(self) -> str:
        if getattr(settings, "USE_S3_STORAGE"):
            path = self.sqlite_file.url
        else:
            path = self.sqlite_file.path
        return path

    sqlite_file = models.FileField(upload_to=get_path, **BNULL)

    def process(self, force=False):
        if self.processed_finished and not force:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        self.mark_started()
        scrobbles = process_koreader_sqlite_file(
            self.upload_file_path, self.user.id
        )
        self.record_log(scrobbles)
        self.mark_finished()


class AudioScrobblerTSVImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "AudioScrobbler TSV Import"

    @property
    def import_type(self) -> str:
        return "AudioScrobbler"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:tsv-import-detail", kwargs={"slug": self.uuid}
        )

    def get_path(instance, filename):
        extension = filename.split(".")[-1]
        uuid = instance.uuid
        return f"audioscrobbler-uploads/{uuid}.{extension}"

    @property
    def upload_file_path(self):
        if getattr(settings, "USE_S3_STORAGE"):
            path = self.tsv_file.url
        else:
            path = self.tsv_file.path
        return path

    tsv_file = models.FileField(upload_to=get_path, **BNULL)

    def process(self, force=False):
        from scrobbles.tsv import process_audioscrobbler_tsv_file

        if self.processed_finished and not force:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        self.mark_started()
        tz = None
        user_id = None
        if self.user:
            user_id = self.user.id
            tz = self.user.profile.tzinfo
        scrobbles = process_audioscrobbler_tsv_file(
            self.upload_file_path, user_id, user_tz=tz
        )
        self.record_log(scrobbles)
        self.mark_finished()


class LastFmImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "Last.FM Import"

    @property
    def import_type(self) -> str:
        return "LastFM"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:lastfm-import-detail", kwargs={"slug": self.uuid}
        )

    def process(self, import_all=False):
        """Import scrobbles found on Last.fm"""
        if self.processed_finished:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        last_import = None
        if not import_all:
            try:
                last_import = LastFmImport.objects.exclude(id=self.id).last()
            except Exception:
                pass

        if not import_all and not last_import:
            logger.warning(
                "No previous import; to import all Last.fm scrobbles, pass import_all=True"
            )
            return

        lastfm = LastFM(self.user)
        last_processed = None
        if last_import:
            last_processed = last_import.processed_finished

        self.mark_started()
        scrobbles = lastfm.import_from_lastfm(last_processed)
        self.record_log(scrobbles)
        self.mark_finished()


class RetroarchImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "Retroarch Import"

    @property
    def import_type(self) -> str:
        return "Retroarch"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:retroarch-import-detail", kwargs={"slug": self.uuid}
        )

    def process(self, import_all=False, force=False):
        """Import scrobbles found in Retroarch play logs"""
        if self.processed_finished and not force:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        if force:
            logger.info("Forcing re-import from Retroarch")

        if not self.user.profile.retroarch_path:
            logger.info(
                "Trying to import Retroarch logs, but user has no retroarch_path configured"
            )

        self.mark_started()
        scrobbles = retroarch.import_retroarch_lrtl_files(
            self.user.profile.retroarch_path,
            self.user.id,
        )
        self.record_log(scrobbles)
        self.mark_finished()


class ChartRecord(TimeStampedModel):
    """Sort of like a materialized view of what we could generate dynamically,
    but which would kill the DB as it gets larger. Collects time-based records
    generated by a cron-like archival job.

    Example: 1972 by Josh Rouse - #3 in January, 2023
    """

    user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
    rank = models.IntegerField(db_index=True)
    count = models.IntegerField(default=0)
    year = models.IntegerField(**BNULL)
    month = models.IntegerField(**BNULL)
    week = models.IntegerField(**BNULL)
    day = models.IntegerField(**BNULL)
    video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
    series = models.ForeignKey(Series, on_delete=models.DO_NOTHING, **BNULL)
    artist = models.ForeignKey(Artist, on_delete=models.DO_NOTHING, **BNULL)
    track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
    period_start = models.DateTimeField(**BNULL)
    period_end = models.DateTimeField(**BNULL)

    def save(self, *args, **kwargs):
        profile = self.user.profile
        if self.week:
            # Set start and end to the start and end of the ISO week
            period = datetime.date.fromisocalendar(self.year, self.week, 1)
            self.period_start = start_of_week(period, profile)
            self.period_end = end_of_week(period, profile)
        if self.day:
            period = datetime.datetime(self.year, self.month, self.day)
            self.period_start = start_of_day(period, profile)
            self.period_end = end_of_day(period, profile)
        if self.month and not self.day:
            period = datetime.datetime(self.year, self.month, 1)
            self.period_start = start_of_month(period, profile)
            self.period_end = end_of_month(period, profile)
        super(ChartRecord, self).save(*args, **kwargs)

    @property
    def media_obj(self):
        media_obj = None
        if self.video:
            media_obj = self.video
        if self.track:
            media_obj = self.track
        if self.artist:
            media_obj = self.artist
        return media_obj

    @property
    def month_str(self) -> str:
        month_str = ""
        if self.month:
            month_str = calendar.month_name[self.month]
        return month_str

    @property
    def day_str(self) -> str:
        day_str = ""
        if self.day:
            day_str = str(self.day)
        return day_str

    @property
    def week_str(self) -> str:
        week_str = ""
        if self.week:
            week_str = str(self.week)
        return "Week " + week_str

    @property
    def period(self) -> str:
        period = str(self.year)
        if self.month:
            period = " ".join([self.month_str, period])
        if self.week:
            period = " ".join([self.week_str, period])
        if self.day:
            period = " ".join([self.day_str, period])
        return period

    @property
    def period_type(self) -> str:
        period = "year"
        if self.month:
            period = "month"
        if self.week:
            period = "week"
        if self.day:
            period = "day"
        return period

    def __str__(self):
        title = f"#{self.rank} in {self.period}"
        if self.day or self.week:
            title = f"#{self.rank} on {self.period}"
        return title

    def link(self):
        get_params = f"?date={self.year}"
        if self.week:
            get_params = get_params + f"-W{self.week}"
        if self.month:
            get_params = get_params + f"-{self.month}"
        if self.day:
            get_params = get_params + f"-{self.day}"
        if self.artist:
            get_params = get_params + "&media=Artist"
        return reverse("scrobbles:charts-home") + get_params

    @classmethod
    def build(cls, user, **kwargs):
        build_charts(user=user, **kwargs)

    @classmethod
    def for_year(cls, user, year):
        return cls.objects.filter(year=year, user=user)

    @classmethod
    def for_month(cls, user, year, month):
        return cls.objects.filter(year=year, month=month, user=user)

    @classmethod
    def for_day(cls, user, year, day, month):
        return cls.objects.filter(year=year, month=month, day=day, user=user)

    @classmethod
    def for_week(cls, user, year, week):
        return cls.objects.filter(year=year, week=week, user=user)
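

# A minimal usage sketch for ChartRecord (illustrative only; assumes the
# archival job passes period kwargs through ChartRecord.build to build_charts,
# and that records already exist for the requested period):
#
#     ChartRecord.build(user=user, year=2023, month=1)
#     top_ten = ChartRecord.for_month(user, 2023, 1).filter(rank__lte=10).order_by("rank")
#     for record in top_ten:
#         print(record, record.media_obj, record.link())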


class Scrobble(TimeStampedModel):
    """A scrobble tracks played media items by a user."""

    class MediaType(models.TextChoices):
        """Enum mapping a media model type to a string"""

        VIDEO = "Video", "Video"
        TRACK = "Track", "Track"
        PODCAST_EPISODE = "PodcastEpisode", "Podcast episode"
        SPORT_EVENT = "SportEvent", "Sport event"
        BOOK = "Book", "Book"
        VIDEO_GAME = "VideoGame", "Video game"
        BOARD_GAME = "BoardGame", "Board game"
        GEO_LOCATION = "GeoLocation", "GeoLocation"
        WEBPAGE = "WebPage", "Web Page"

    uuid = models.UUIDField(editable=False, **BNULL)
    video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
    track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
    podcast_episode = models.ForeignKey(
        PodcastEpisode, on_delete=models.DO_NOTHING, **BNULL
    )
    sport_event = models.ForeignKey(
        SportEvent, on_delete=models.DO_NOTHING, **BNULL
    )
    book = models.ForeignKey(Book, on_delete=models.DO_NOTHING, **BNULL)
    video_game = models.ForeignKey(
        VideoGame, on_delete=models.DO_NOTHING, **BNULL
    )
    board_game = models.ForeignKey(
        BoardGame, on_delete=models.DO_NOTHING, **BNULL
    )
    geo_location = models.ForeignKey(
        GeoLocation, on_delete=models.DO_NOTHING, **BNULL
    )
    web_page = models.ForeignKey(WebPage, on_delete=models.DO_NOTHING, **BNULL)
    media_type = models.CharField(
        max_length=14, choices=MediaType.choices, default=MediaType.VIDEO
    )
    user = models.ForeignKey(
        User, blank=True, null=True, on_delete=models.DO_NOTHING
    )

    # Time keeping
    timestamp = models.DateTimeField(**BNULL)
    stop_timestamp = models.DateTimeField(**BNULL)
    playback_position_seconds = models.IntegerField(**BNULL)

    # Status indicators
    is_paused = models.BooleanField(default=False)
    played_to_completion = models.BooleanField(default=False)
    in_progress = models.BooleanField(default=True)

    # Metadata
    source = models.CharField(max_length=255, **BNULL)
    scrobble_log = models.JSONField(**BNULL)
    timezone = models.CharField(max_length=50, **BNULL)

    # Fields for keeping track of book data
    book_koreader_hash = models.CharField(max_length=50, **BNULL)
    book_pages_read = models.IntegerField(**BNULL)
    book_page_data = models.JSONField(**BNULL)

    # Fields for keeping track of video game data
    videogame_save_data = models.FileField(
        upload_to="scrobbles/videogame_save_data/", **BNULL
    )
    videogame_screenshot = models.ImageField(
        upload_to="scrobbles/videogame_screenshot/", **BNULL
    )
    videogame_screenshot_small = ImageSpecField(
        source="videogame_screenshot",
        processors=[ResizeToFit(100, 100)],
        format="JPEG",
        options={"quality": 60},
    )
    videogame_screenshot_medium = ImageSpecField(
        source="videogame_screenshot",
        processors=[ResizeToFit(300, 300)],
        format="JPEG",
        options={"quality": 75},
    )
    long_play_seconds = models.BigIntegerField(**BNULL)
    long_play_complete = models.BooleanField(**BNULL)

    def save(self, *args, **kwargs):
        if not self.uuid:
            self.uuid = uuid4()

        if not self.timezone:
            timezone = settings.TIME_ZONE
            if self.user and self.user.profile:
                timezone = self.user.profile.timezone
            self.timezone = timezone

        # Microseconds mess up Django's filtering, and we don't need to be that specific
        if self.timestamp:
            self.timestamp = self.timestamp.replace(microsecond=0)

        self.media_type = self.MediaType(self.media_obj.__class__.__name__)
        return super(Scrobble, self).save(*args, **kwargs)

    def push_to_archivebox(self):
        pushable_media = hasattr(
            self.media_obj, "push_to_archivebox"
        ) and callable(self.media_obj.push_to_archivebox)

        if pushable_media and self.user.profile.archivebox_url:
            try:
                self.media_obj.push_to_archivebox(
                    url=self.user.profile.archivebox_url,
                    username=self.user.profile.archivebox_username,
                    password=self.user.profile.archivebox_password,
                )
            except Exception:
                logger.info(
                    "Failed to push URL to archivebox",
                    extra={
                        "archivebox_url": self.user.profile.archivebox_url,
                        "archivebox_username": self.user.profile.archivebox_username,
                    },
                )

    @property
    def tzinfo(self):
        return pytz.timezone(self.timezone)

    @property
    def scrobble_media_key(self) -> str:
        return media_class_to_foreign_key(self.media_type) + "_id"

    @property
    def status(self) -> str:
        if self.is_paused:
            return "paused"
        if self.played_to_completion:
            return "finished"
        if self.in_progress:
            return "in-progress"
        return "zombie"

    @property
    def is_stale(self) -> bool:
        """Mark the scrobble as stale if the media's staleness window has passed
        since it was last updated"""
        is_stale = False
        now = timezone.now()
        seconds_since_last_update = (now - self.modified).seconds
        if seconds_since_last_update >= self.media_obj.SECONDS_TO_STALE:
            is_stale = True
        return is_stale

    @property
    def previous(self) -> "Scrobble":
        return (
            self.media_obj.scrobble_set.order_by("-timestamp")
            .filter(timestamp__lt=self.timestamp)
            .first()
        )

    @property
    def next(self) -> "Scrobble":
        return (
            self.media_obj.scrobble_set.order_by("timestamp")
            .filter(timestamp__gt=self.timestamp)
            .first()
        )

    @property
    def previous_by_media(self) -> "Scrobble":
        return (
            Scrobble.objects.filter(
                media_type=self.media_type,
                user=self.user,
                timestamp__lt=self.timestamp,
            )
            .order_by("-timestamp")
            .first()
        )

    @property
    def next_by_media(self) -> "Scrobble":
        return (
            Scrobble.objects.filter(
                media_type=self.media_type,
                user=self.user,
                timestamp__gt=self.timestamp,
            )
            .order_by("timestamp")
            .first()
        )

    @property
    def previous_by_user(self) -> "Scrobble":
        return (
            Scrobble.objects.filter(
                user=self.user, timestamp__lt=self.timestamp
            )
            .order_by("-timestamp")
            .first()
        )

    @property
    def next_by_user(self) -> "Scrobble":
        return (
            Scrobble.objects.filter(
                user=self.user, timestamp__gt=self.timestamp
            )
            .order_by("timestamp")
            .first()
        )

    @property
    def session_pages_read(self) -> Optional[int]:
        if not self.book_pages_read:
            return 0
        return self.book_pages_read

    @property
    def is_long_play(self) -> bool:
        return self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values()

    @property
    def percent_played(self) -> int:
        if not self.media_obj:
            return 0

        if self.media_obj and not self.media_obj.run_time_seconds:
            return 100

        if not self.playback_position_seconds and self.played_to_completion:
            return 100

        playback_seconds = self.playback_position_seconds
        if not playback_seconds:
            playback_seconds = (timezone.now() - self.timestamp).seconds

        run_time_secs = self.media_obj.run_time_seconds
        percent = int((playback_seconds / run_time_secs) * 100)

        if self.is_long_play:
            long_play_secs = 0
            if self.previous and not self.previous.long_play_complete:
                long_play_secs = self.previous.long_play_seconds or 0
            percent = int(
                ((playback_seconds + long_play_secs) / run_time_secs) * 100
            )

        return percent
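
    # Worked example (illustrative): an item with run_time_seconds=3600 and
    # playback_position_seconds=900 reports 25%. For long-play media, seconds
    # carried over from the previous unfinished scrobble are added first, so
    # 900s now plus 2700s carried over on the same 3600s item reports 100%.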

    @property
    def probably_still_in_progress(self) -> bool:
        """Add our start time to our media run time to estimate when we expect
        playback to end. Audio tracks should be given a second or two of grace;
        videos should be given closer to 30 minutes, because the odds of
        watching them back to back are very slim.
        """
        is_in_progress = False
        padding_seconds = MEDIA_END_PADDING_SECONDS.get(self.media_type)
        if not padding_seconds:
            return is_in_progress

        expected_end = self.timestamp + datetime.timedelta(
            seconds=self.media_obj.run_time_seconds
        )
        expected_end_padded = expected_end + datetime.timedelta(
            seconds=padding_seconds
        )
        # If the expected end plus the per-media padding is still in the
        # future, assume playback is still going.
        is_in_progress = expected_end_padded > pendulum.now()
        logger.info(
            "[scrobbling] checking if we're probably still playing",
            extra={
                "media_id": self.media_obj.id,
                "scrobble_id": self.id,
                "media_type": self.media_type,
                "probably_still_in_progress": is_in_progress,
            },
        )
        return is_in_progress

    @property
    def can_be_updated(self) -> bool:
        if self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values():
            logger.info(
                "[scrobbling] cannot be updated, long play media",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                },
            )
            return False
        if self.percent_played >= 100 and not self.probably_still_in_progress:
            logger.info(
                "[scrobbling] cannot be updated, existing scrobble is 100% played",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                },
            )
            return False
        if self.is_stale:
            logger.info(
                "[scrobbling] cannot be updated, stale",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                },
            )
            return False
        logger.info(
            "[scrobbling] can be updated",
            extra={
                "media_id": self.media_obj.id,
                "scrobble_id": self.id,
                "media_type": self.media_type,
            },
        )
        return True

    @property
    def media_obj(self):
        media_obj = None
        if self.video:
            media_obj = self.video
        if self.track:
            media_obj = self.track
        if self.podcast_episode:
            media_obj = self.podcast_episode
        if self.sport_event:
            media_obj = self.sport_event
        if self.book:
            media_obj = self.book
        if self.video_game:
            media_obj = self.video_game
        if self.board_game:
            media_obj = self.board_game
        if self.geo_location:
            media_obj = self.geo_location
        if self.web_page:
            media_obj = self.web_page
        return media_obj

    def __str__(self):
        timestamp = self.timestamp.strftime("%Y-%m-%d")
        return f"Scrobble of {self.media_obj} ({timestamp})"

    def calc_reading_duration(self) -> int:
        duration = 0
        if self.book_page_data:
            for k, v in self.book_page_data.items():
                duration += v.get("duration", 0)
        return duration

    def calc_pages_read(self) -> int:
        pages_read = 0
        if self.book_page_data:
            pages = [int(k) for k in self.book_page_data.keys()]
            pages.sort()
            if len(pages) == 1:
                pages_read = 1
            else:
                pages_read = pages[-1] - pages[0]
        return pages_read

    @property
    def last_page_read(self) -> int:
        last_page = 0
        if self.book_page_data:
            pages = [int(k) for k in self.book_page_data.keys()]
            pages.sort()
            last_page = pages[-1]
        return last_page
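
    # book_page_data is assumed to be a JSON mapping keyed by stringified page
    # number, with per-page stats, roughly:
    #   {"12": {"duration": 48, ...}, "13": {"duration": 61, ...}}
    # The helpers above rely only on the keys and each page's "duration" value.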

    @classmethod
    def create_or_update(
        cls, media, user_id: int, scrobble_data: dict, **kwargs
    ) -> "Scrobble":
        key = media_class_to_foreign_key(media.__class__.__name__)
        media_query = models.Q(**{key: media})
        scrobble_data[key + "_id"] = media.id

        # Find our last scrobble of this media item (track, video, etc)
        scrobble = (
            cls.objects.filter(
                media_query,
                user_id=user_id,
            )
            .order_by("-timestamp")
            .first()
        )
        source = scrobble_data.get("source")
        mtype = media.__class__.__name__
        mopidy_status = scrobble_data.get("mopidy_status", None)

        # GeoLocations are a special case scrobble
        if mtype == cls.MediaType.GEO_LOCATION:
            logger.warning(
                "[scrobbling] use create_or_update_location for GeoLocations"
            )
            scrobble = cls.create_or_update_location(
                media, scrobble_data, user_id
            )
            return scrobble

        logger.info(
            "[scrobbling] check for existing scrobble to update",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": mtype,
                "media_id": media.id,
                "scrobble_data": scrobble_data,
                "percent_played": scrobble.percent_played if scrobble else 0,
                "can_be_updated": scrobble.can_be_updated
                if scrobble
                else False,
            },
        )

        if scrobble and (
            scrobble.can_be_updated or mopidy_status == "stopped"
        ):
            return scrobble.update(scrobble_data)

        # Discard status before creating
        scrobble_data.pop("mopidy_status", None)
        scrobble_data.pop("jellyfin_status", None)
        logger.info(
            "[scrobbling] creating new scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": mtype,
                "media_id": media.id,
                "source": source,
            },
        )
        return cls.create(scrobble_data)

    @classmethod
    def create_or_update_location(
        cls, location: GeoLocation, scrobble_data: dict, user_id: int
    ) -> "Scrobble":
        """Locations are a special type: the current scrobble for a user is
        always the active scrobble, and we only finish it and move on if we get
        a new location that is far enough away (from this and the last few
        scrobbles) to count as having actually moved.
        """
        key = media_class_to_foreign_key(location.__class__.__name__)
        scrobble_data[key + "_id"] = location.id
        scrobble = (
            cls.objects.filter(
                media_type=cls.MediaType.GEO_LOCATION,
                user_id=user_id,
                timestamp__lte=scrobble_data.get("timestamp"),
            )
            .order_by("-timestamp")
            .first()
        )

        logger.info(
            "[scrobbling] fetching last location scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
            },
        )

        if not scrobble:
            logger.info(
                "[scrobbling] finished - no existing location scrobbles found",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                },
            )
            return cls.create(scrobble_data)

        if scrobble.media_obj == location:
            logger.info(
                "[scrobbling] finished - same location - not moved",
                extra={
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "media_id": location.id,
                    "scrobble_id": scrobble.id,
                    "scrobble_media_id": scrobble.media_obj.id,
                },
            )
            return scrobble

        has_moved = location.has_moved(scrobble.media_obj)
        logger.info(
            "[scrobbling] checking - has the last location moved?",
            extra={
                "scrobble_id": scrobble.id,
                "scrobble_media_id": scrobble.media_obj.id,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "has_moved": has_moved,
            },
        )

        if not has_moved:
            logger.info(
                "[scrobbling] finished - not far from old location - not moved",
                extra={
                    "scrobble_id": scrobble.id,
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media__id": scrobble.media_obj.id,
                },
            )
            return scrobble

        if existing_locations := location.in_proximity(named=True):
            existing_location = existing_locations.first()
            scrobble.scrobble_log[
                pendulum.now().timestamp()
            ] = f"Location {location.id} too close to this scrobble"
            scrobble.save(update_fields=["scrobble_log"])
            logger.info(
                "[scrobbling] finished - found existing named location",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media_id": existing_location.id,
                },
            )
            return scrobble

        scrobble.stop(force_finish=True)
        scrobble = cls.create(scrobble_data)

        logger.info(
            "[scrobbling] finished - created for location",
            extra={
                "scrobble_id": scrobble.id,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
                "media_type": cls.MediaType.GEO_LOCATION,
                "source": scrobble_data.get("source"),
            },
        )
        return scrobble

    def update(self, scrobble_data: dict) -> "Scrobble":
        # Status is a field we get from Mopidy, which refuses to poll us
        scrobble_status = scrobble_data.pop("mopidy_status", None)
        if not scrobble_status:
            scrobble_status = scrobble_data.pop("jellyfin_status", None)

        logger.info(
            "[scrobbling] update called",
            extra={
                "scrobble_id": self.id,
                "scrobble_data": scrobble_data,
                "media_type": self.media_type,
                "scrobble_status": scrobble_status,
            },
        )
        # This is really expensive on the DB ... do we need to track this?
        # if self.percent_played < 100:
        #     # Only worry about ticks if we haven't gotten to the end
        #     self.update_ticks(scrobble_data)

        if self.beyond_completion_percent:
            scrobble_status = "stopped"

        if scrobble_status == "stopped":
            self.stop()
        if scrobble_status == "paused":
            self.pause()
        if scrobble_status == "resumed":
            self.resume()

        if scrobble_status != "resumed":
            scrobble_data["stop_timestamp"] = (
                scrobble_data.pop("timestamp", None) or timezone.now()
            )

        # timestamp should be more-or-less immutable
        scrobble_data.pop("timestamp", None)

        update_fields = []
        for key, value in scrobble_data.items():
            setattr(self, key, value)
            update_fields.append(key)
        self.save(update_fields=update_fields)

        logger.info(
            "[scrobbling] update finished",
            extra={
                "scrobble_id": self.id,
                "scrobble_data": scrobble_data,
                "scrobble_status": scrobble_status,
                "media_type": self.media_type,
            },
        )
        return self

    @classmethod
    def create(
        cls,
        scrobble_data: dict,
    ) -> "Scrobble":
        scrobble_data["scrobble_log"] = {}
        scrobble = cls.objects.create(
            **scrobble_data,
        )
        return scrobble

    def stop(self, force_finish=False) -> None:
        self.stop_timestamp = timezone.now()
        self.played_to_completion = True
        self.in_progress = False
        if not self.playback_position_seconds:
            self.playback_position_seconds = int(
                (self.stop_timestamp - self.timestamp).total_seconds()
            )
        self.save(
            update_fields=[
                "in_progress",
                "played_to_completion",
                "stop_timestamp",
                "playback_position_seconds",
            ]
        )
        class_name = self.media_obj.__class__.__name__
        if class_name in LONG_PLAY_MEDIA.values():
            self.finish_long_play()
        logger.info(
            "[scrobbling] stopped",
            extra={
                "scrobble_id": self.id,
                "media_id": self.media_obj.id,
                "media_type": self.media_type,
                "source": self.source,
            },
        )

    def pause(self) -> None:
        if self.is_paused:
            logger.warning(f"{self.id} - already paused - {self.source}")
            return
        self.is_paused = True
        self.save(update_fields=["is_paused"])
        logger.info(
            "[scrobbling] paused",
            extra={
                "scrobble_id": self.id,
                "media_type": self.media_type,
                "source": self.source,
            },
        )

    def resume(self) -> None:
        if self.is_paused or not self.in_progress:
            self.is_paused = False
            self.in_progress = True
            self.save(update_fields=["is_paused", "in_progress"])
            logger.info(
                "[scrobbling] resumed",
                extra={
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "source": self.source,
                },
            )

    def cancel(self) -> None:
        self.delete()

    def update_ticks(self, data) -> None:
        self.playback_position_seconds = data.get("playback_position_seconds")
        self.save(update_fields=["playback_position_seconds"])

    def finish_long_play(self):
        seconds_elapsed = (timezone.now() - self.timestamp).seconds
        past_seconds = 0

        # Set our playback seconds, and calculate total long play seconds
        self.playback_position_seconds = seconds_elapsed
        if self.previous:
            past_seconds = self.previous.long_play_seconds or 0
        self.long_play_seconds = past_seconds + seconds_elapsed

        # Long play scrobbles are always finished when we say they are
        self.played_to_completion = True

        self.save(
            update_fields=[
                "playback_position_seconds",
                "played_to_completion",
                "long_play_seconds",
            ]
        )
        logger.info(
            "[scrobbling] finishing long play",
            extra={
                "scrobble_id": self.id,
            },
        )

    @property
    def beyond_completion_percent(self) -> bool:
        """Returns True if our media is beyond its completion percent, unless
        our type is GeoLocation, in which case we always return False.
        """
        beyond_completion = (
            self.percent_played >= self.media_obj.COMPLETION_PERCENT
        )
        if self.media_type == "GeoLocation":
            logger.info(
                "[scrobbling] locations are ONLY completed when a new one is created",
                extra={
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "beyond_completion": beyond_completion,
                },
            )
            beyond_completion = False
        return beyond_completion
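

# A minimal usage sketch for Scrobble.create_or_update (illustrative only;
# assumes a Track instance, a user id, and a caller such as a webhook view
# that assembled scrobble_data from the client payload):
#
#     scrobble = Scrobble.create_or_update(
#         track,
#         user.id,
#         {"timestamp": timezone.now(), "source": "Mopidy", "mopidy_status": "playing"},
#     )
#     # ...later, when the client reports playback has ended:
#     scrobble.update({"mopidy_status": "stopped", "playback_position_seconds": 215})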