models.py 41 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7
  1. import calendar
  2. import datetime
  3. import json
  4. import logging
  5. from typing import Optional
  6. from uuid import uuid4
  7. import pendulum
  8. import pytz
  9. from boardgames.models import BoardGame
  10. from books.koreader import process_koreader_sqlite_file
  11. from books.models import Book
  12. from bricksets.models import BrickSet
  13. from django.conf import settings
  14. from django.contrib.auth import get_user_model
  15. from django.db import models
  16. from django.urls import reverse
  17. from django.utils import timezone
  18. from django_extensions.db.models import TimeStampedModel
  19. from imagekit.models import ImageSpecField
  20. from imagekit.processors import ResizeToFit
  21. from lifeevents.models import LifeEvent
  22. from locations.models import GeoLocation
  23. from moods.models import Mood
  24. from music.lastfm import LastFM
  25. from music.models import Artist, Track
  26. from podcasts.models import PodcastEpisode
  27. from profiles.utils import (
  28. end_of_day,
  29. end_of_month,
  30. end_of_week,
  31. start_of_day,
  32. start_of_month,
  33. start_of_week,
  34. )
  35. from scrobbles import dataclasses as logdata
  36. from scrobbles.constants import LONG_PLAY_MEDIA
  37. from scrobbles.stats import build_charts
  38. from scrobbles.utils import media_class_to_foreign_key
  39. from sports.models import SportEvent
  40. from tasks.models import Task
  41. from trails.models import Trail
  42. from videogames import retroarch
  43. from videogames.models import VideoGame
  44. from videos.models import Series, Video
  45. from webpages.models import WebPage
  46. from vrobbler.apps.scrobbles.constants import MEDIA_END_PADDING_SECONDS
  47. logger = logging.getLogger(__name__)
  48. User = get_user_model()
  49. BNULL = {"blank": True, "null": True}
  50. POINTS_FOR_MOVEMENT_HISTORY = int(
  51. getattr(settings, "POINTS_FOR_MOVEMENT_HISTORY", 3)
  52. )
  53. class BaseFileImportMixin(TimeStampedModel):
  54. user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
  55. uuid = models.UUIDField(editable=False, default=uuid4)
  56. processing_started = models.DateTimeField(**BNULL)
  57. processed_finished = models.DateTimeField(**BNULL)
  58. process_log = models.TextField(**BNULL)
  59. process_count = models.IntegerField(**BNULL)
  60. class Meta:
  61. abstract = True
  62. def __str__(self):
  63. return f"{self.import_type} import on {self.human_start}"
  64. @property
  65. def human_start(self):
  66. start = "Unknown"
  67. if self.processing_started:
  68. start = self.processing_started.strftime("%B %d, %Y at %H:%M")
  69. return start
  70. @property
  71. def import_type(self) -> str:
  72. return "Unknown Import Source"
  73. def process(self, force=False):
  74. logger.warning("Process not implemented")
  75. def undo(self, dryrun=False):
  76. """Accepts the log from a scrobble import and removes the scrobbles"""
  77. from scrobbles.models import Scrobble
  78. if not self.process_log:
  79. logger.warning("No lines in process log found to undo")
  80. return
  81. for line in self.process_log.split("\n"):
  82. scrobble_id = line.split("\t")[0]
  83. scrobble = Scrobble.objects.filter(id=scrobble_id).first()
  84. if not scrobble:
  85. logger.warning(
  86. f"Could not find scrobble {scrobble_id} to undo"
  87. )
  88. continue
  89. logger.info(f"Removing scrobble {scrobble_id}")
  90. if not dryrun:
  91. scrobble.delete()
  92. self.processed_finished = None
  93. self.processing_started = None
  94. self.process_count = None
  95. self.process_log = ""
  96. self.save(
  97. update_fields=[
  98. "processed_finished",
  99. "processing_started",
  100. "process_log",
  101. "process_count",
  102. ]
  103. )
  104. def scrobbles(self) -> models.QuerySet:
  105. scrobble_ids = []
  106. if self.process_log:
  107. for line in self.process_log.split("\n"):
  108. sid = line.split("\t")[0]
  109. if sid:
  110. scrobble_ids.append(sid)
  111. return Scrobble.objects.filter(id__in=scrobble_ids)
  112. def mark_started(self):
  113. self.processing_started = timezone.now()
  114. self.save(update_fields=["processing_started"])
  115. def mark_finished(self):
  116. self.processed_finished = timezone.now()
  117. self.save(update_fields=["processed_finished"])
  118. def record_log(self, scrobbles):
  119. self.process_log = ""
  120. if not scrobbles:
  121. self.process_count = 0
  122. self.save(update_fields=["process_log", "process_count"])
  123. return
  124. for count, scrobble in enumerate(scrobbles):
  125. scrobble_str = f"{scrobble.id}\t{scrobble.timestamp}\t{scrobble.media_obj.title}"
  126. log_line = f"{scrobble_str}"
  127. if count > 0:
  128. log_line = "\n" + log_line
  129. self.process_log += log_line
  130. self.process_count = len(scrobbles)
  131. self.save(update_fields=["process_log", "process_count"])
  132. @property
  133. def upload_file_path(self):
  134. raise NotImplementedError
  135. class KoReaderImport(BaseFileImportMixin):
  136. class Meta:
  137. verbose_name = "KOReader Import"
  138. @property
  139. def import_type(self) -> str:
  140. return "KOReader"
  141. def get_absolute_url(self):
  142. return reverse(
  143. "scrobbles:koreader-import-detail", kwargs={"slug": self.uuid}
  144. )
  145. def get_path(instance, filename):
  146. extension = filename.split(".")[-1]
  147. uuid = instance.uuid
  148. return f"koreader-uploads/{uuid}.{extension}"
  149. @property
  150. def upload_file_path(self) -> str:
  151. if getattr(settings, "USE_S3_STORAGE"):
  152. path = self.sqlite_file.url
  153. else:
  154. path = self.sqlite_file.path
  155. return path
  156. sqlite_file = models.FileField(upload_to=get_path, **BNULL)
  157. def process(self, force=False):
  158. if self.processed_finished and not force:
  159. logger.info(
  160. f"{self} already processed on {self.processed_finished}"
  161. )
  162. return
  163. self.mark_started()
  164. scrobbles = process_koreader_sqlite_file(
  165. self.upload_file_path, self.user.id
  166. )
  167. self.record_log(scrobbles)
  168. self.mark_finished()
  169. class AudioScrobblerTSVImport(BaseFileImportMixin):
  170. class Meta:
  171. verbose_name = "AudioScrobbler TSV Import"
  172. @property
  173. def import_type(self) -> str:
  174. return "AudiosScrobbler"
  175. def get_absolute_url(self):
  176. return reverse(
  177. "scrobbles:tsv-import-detail", kwargs={"slug": self.uuid}
  178. )
  179. def get_path(instance, filename):
  180. extension = filename.split(".")[-1]
  181. uuid = instance.uuid
  182. return f"audioscrobbler-uploads/{uuid}.{extension}"
  183. @property
  184. def upload_file_path(self):
  185. if getattr(settings, "USE_S3_STORAGE"):
  186. path = self.tsv_file.url
  187. else:
  188. path = self.tsv_file.path
  189. return path
  190. tsv_file = models.FileField(upload_to=get_path, **BNULL)
  191. def process(self, force=False):
  192. from scrobbles.tsv import process_audioscrobbler_tsv_file
  193. if self.processed_finished and not force:
  194. logger.info(
  195. f"{self} already processed on {self.processed_finished}"
  196. )
  197. return
  198. self.mark_started()
  199. tz = None
  200. user_id = None
  201. if self.user:
  202. user_id = self.user.id
  203. tz = self.user.profile.tzinfo
  204. scrobbles = process_audioscrobbler_tsv_file(
  205. self.upload_file_path, user_id, user_tz=tz
  206. )
  207. self.record_log(scrobbles)
  208. self.mark_finished()
  209. class LastFmImport(BaseFileImportMixin):
  210. class Meta:
  211. verbose_name = "Last.FM Import"
  212. @property
  213. def import_type(self) -> str:
  214. return "LastFM"
  215. def get_absolute_url(self):
  216. return reverse(
  217. "scrobbles:lastfm-import-detail", kwargs={"slug": self.uuid}
  218. )
  219. def process(self, import_all=False):
  220. """Import scrobbles found on LastFM"""
  221. if self.processed_finished:
  222. logger.info(
  223. f"{self} already processed on {self.processed_finished}"
  224. )
  225. return
  226. last_import = None
  227. if not import_all:
  228. try:
  229. last_import = LastFmImport.objects.exclude(id=self.id).last()
  230. except:
  231. pass
  232. if not import_all and not last_import:
  233. logger.warn(
  234. "No previous import, to import all Last.fm scrobbles, pass import_all=True"
  235. )
  236. return
  237. lastfm = LastFM(self.user)
  238. last_processed = None
  239. if last_import:
  240. last_processed = last_import.processed_finished
  241. self.mark_started()
  242. scrobbles = lastfm.import_from_lastfm(last_processed)
  243. self.record_log(scrobbles)
  244. self.mark_finished()
  245. class RetroarchImport(BaseFileImportMixin):
  246. class Meta:
  247. verbose_name = "Retroarch Import"
  248. @property
  249. def import_type(self) -> str:
  250. return "Retroarch"
  251. def get_absolute_url(self):
  252. return reverse(
  253. "scrobbles:retroarch-import-detail", kwargs={"slug": self.uuid}
  254. )
  255. def process(self, import_all=False, force=False):
  256. """Import scrobbles found on Retroarch"""
  257. if self.processed_finished and not force:
  258. logger.info(
  259. f"{self} already processed on {self.processed_finished}"
  260. )
  261. return
  262. if force:
  263. logger.info(f"You told me to force import from Retroarch")
  264. if not self.user.profile.retroarch_path:
  265. logger.info(
  266. "Tying to import Retroarch logs, but user has no retroarch_path configured"
  267. )
  268. self.mark_started()
  269. scrobbles = retroarch.import_retroarch_lrtl_files(
  270. self.user.profile.retroarch_path,
  271. self.user.id,
  272. )
  273. self.record_log(scrobbles)
  274. self.mark_finished()
  275. class ChartRecord(TimeStampedModel):
  276. """Sort of like a materialized view for what we could dynamically generate,
  277. but would kill the DB as it gets larger. Collects time-based records
  278. generated by a cron-like archival job
  279. 1972 by Josh Rouse - #3 in 2023, January
  280. """
  281. user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
  282. rank = models.IntegerField(db_index=True)
  283. count = models.IntegerField(default=0)
  284. year = models.IntegerField(**BNULL)
  285. month = models.IntegerField(**BNULL)
  286. week = models.IntegerField(**BNULL)
  287. day = models.IntegerField(**BNULL)
  288. video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
  289. series = models.ForeignKey(Series, on_delete=models.DO_NOTHING, **BNULL)
  290. artist = models.ForeignKey(Artist, on_delete=models.DO_NOTHING, **BNULL)
  291. track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
  292. period_start = models.DateTimeField(**BNULL)
  293. period_end = models.DateTimeField(**BNULL)
  294. def save(self, *args, **kwargs):
  295. profile = self.user.profile
  296. if self.week:
  297. # set start and end to start and end of week
  298. period = datetime.date.fromisocalendar(self.year, self.week, 1)
  299. self.period_start = start_of_week(period, profile)
  300. self.period_start = end_of_week(period, profile)
  301. if self.day:
  302. period = datetime.datetime(self.year, self.month, self.day)
  303. self.period_start = start_of_day(period, profile)
  304. self.period_end = end_of_day(period, profile)
  305. if self.month and not self.day:
  306. period = datetime.datetime(self.year, self.month, 1)
  307. self.period_start = start_of_month(period, profile)
  308. self.period_end = end_of_month(period, profile)
  309. super(ChartRecord, self).save(*args, **kwargs)
  310. @property
  311. def media_obj(self):
  312. media_obj = None
  313. if self.video:
  314. media_obj = self.video
  315. if self.track:
  316. media_obj = self.track
  317. if self.artist:
  318. media_obj = self.artist
  319. return media_obj
  320. @property
  321. def month_str(self) -> str:
  322. month_str = ""
  323. if self.month:
  324. month_str = calendar.month_name[self.month]
  325. return month_str
  326. @property
  327. def day_str(self) -> str:
  328. day_str = ""
  329. if self.day:
  330. day_str = str(self.day)
  331. return day_str
  332. @property
  333. def week_str(self) -> str:
  334. week_str = ""
  335. if self.week:
  336. week_str = str(self.week)
  337. return "Week " + week_str
  338. @property
  339. def period(self) -> str:
  340. period = str(self.year)
  341. if self.month:
  342. period = " ".join([self.month_str, period])
  343. if self.week:
  344. period = " ".join([self.week_str, period])
  345. if self.day:
  346. period = " ".join([self.day_str, period])
  347. return period
  348. @property
  349. def period_type(self) -> str:
  350. period = "year"
  351. if self.month:
  352. period = "month"
  353. if self.week:
  354. period = "week"
  355. if self.day:
  356. period = "day"
  357. return period
  358. def __str__(self):
  359. title = f"#{self.rank} in {self.period}"
  360. if self.day or self.week:
  361. title = f"#{self.rank} on {self.period}"
  362. return title
  363. def link(self):
  364. get_params = f"?date={self.year}"
  365. if self.week:
  366. get_params = get_params = get_params + f"-W{self.week}"
  367. if self.month:
  368. get_params = get_params = get_params + f"-{self.month}"
  369. if self.day:
  370. get_params = get_params = get_params + f"-{self.day}"
  371. if self.artist:
  372. get_params = get_params + "&media=Artist"
  373. return reverse("scrobbles:charts-home") + get_params
  374. @classmethod
  375. def build(cls, user, **kwargs):
  376. build_charts(user=user, **kwargs)
  377. @classmethod
  378. def for_year(cls, user, year):
  379. return cls.objects.filter(year=year, user=user)
  380. @classmethod
  381. def for_month(cls, user, year, month):
  382. return cls.objects.filter(year=year, month=month, user=user)
  383. @classmethod
  384. def for_day(cls, user, year, day, month):
  385. return cls.objects.filter(year=year, month=month, day=day, user=user)
  386. @classmethod
  387. def for_week(cls, user, year, week):
  388. return cls.objects.filter(year=year, week=week, user=user)
  389. class Scrobble(TimeStampedModel):
  390. """A scrobble tracks played media items by a user."""
    class MediaType(models.TextChoices):
        """Enum mapping a media model type to a string"""

        # Each value is the media model's class name; Scrobble.save()
        # derives media_type via MediaType(media_obj.__class__.__name__).
        VIDEO = "Video", "Video"
        TRACK = "Track", "Track"
        PODCAST_EPISODE = "PodcastEpisode", "Podcast episode"
        SPORT_EVENT = "SportEvent", "Sport event"
        BOOK = "Book", "Book"
        VIDEO_GAME = "VideoGame", "Video game"
        BOARD_GAME = "BoardGame", "Board game"
        GEO_LOCATION = "GeoLocation", "GeoLocation"
        TRAIL = "Trail", "Trail"
        TASK = "Task", "Task"
        WEBPAGE = "WebPage", "Web Page"
        LIFE_EVENT = "LifeEvent", "Life event"
        MOOD = "Mood", "Mood"
        BRICKSET = "BrickSet", "Brick set"
    # Stable public identifier; generated in save() when missing
    uuid = models.UUIDField(editable=False, **BNULL)

    # One nullable FK per supported media type; exactly one is expected to
    # be set, and the media_obj property resolves whichever is populated.
    video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
    track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
    podcast_episode = models.ForeignKey(
        PodcastEpisode, on_delete=models.DO_NOTHING, **BNULL
    )
    sport_event = models.ForeignKey(
        SportEvent, on_delete=models.DO_NOTHING, **BNULL
    )
    book = models.ForeignKey(Book, on_delete=models.DO_NOTHING, **BNULL)
    video_game = models.ForeignKey(
        VideoGame, on_delete=models.DO_NOTHING, **BNULL
    )
    board_game = models.ForeignKey(
        BoardGame, on_delete=models.DO_NOTHING, **BNULL
    )
    geo_location = models.ForeignKey(
        GeoLocation, on_delete=models.DO_NOTHING, **BNULL
    )
    trail = models.ForeignKey(Trail, on_delete=models.DO_NOTHING, **BNULL)
    task = models.ForeignKey(Task, on_delete=models.DO_NOTHING, **BNULL)
    web_page = models.ForeignKey(WebPage, on_delete=models.DO_NOTHING, **BNULL)
    life_event = models.ForeignKey(
        LifeEvent, on_delete=models.DO_NOTHING, **BNULL
    )
    mood = models.ForeignKey(Mood, on_delete=models.DO_NOTHING, **BNULL)
    brickset = models.ForeignKey(
        BrickSet, on_delete=models.DO_NOTHING, **BNULL
    )

    # Denormalized media class name; kept in sync with media_obj in save()
    media_type = models.CharField(
        max_length=14, choices=MediaType.choices, default=MediaType.VIDEO
    )
    user = models.ForeignKey(
        User, blank=True, null=True, on_delete=models.DO_NOTHING
    )

    # Time keeping
    timestamp = models.DateTimeField(**BNULL)
    stop_timestamp = models.DateTimeField(**BNULL)
    playback_position_seconds = models.IntegerField(**BNULL)

    # Status indicators
    is_paused = models.BooleanField(default=False)
    played_to_completion = models.BooleanField(default=False)
    in_progress = models.BooleanField(default=True)

    # Metadata
    source = models.CharField(max_length=255, **BNULL)
    # Free-form per-scrobble JSON; decoded via the media's logdata class
    log = models.JSONField(
        **BNULL,
        encoder=logdata.ScrobbleLogDataEncoder,
        decoder=logdata.ScrobbleLogDataDecoder,
    )
    # Timezone name string; populated in save() from the user profile
    timezone = models.CharField(max_length=50, **BNULL)

    # Fields for keeping track of video game data
    videogame_save_data = models.FileField(
        upload_to="scrobbles/videogame_save_data/", **BNULL
    )
    gpx_file = models.FileField(upload_to="scrobbles/gpx_file/", **BNULL)
    screenshot = models.ImageField(
        upload_to="scrobbles/videogame_screenshot/", **BNULL
    )
    # Derived thumbnail specs (imagekit); not database columns
    screenshot_small = ImageSpecField(
        source="screenshot",
        processors=[ResizeToFit(100, 100)],
        format="JPEG",
        options={"quality": 60},
    )
    screenshot_medium = ImageSpecField(
        source="screenshot",
        processors=[ResizeToFit(300, 300)],
        format="JPEG",
        options={"quality": 75},
    )

    # Cumulative progress for multi-session ("long play") media
    long_play_seconds = models.BigIntegerField(**BNULL)
    long_play_complete = models.BooleanField(**BNULL)
  480. def save(self, *args, **kwargs):
  481. if not self.uuid:
  482. self.uuid = uuid4()
  483. if not self.timezone:
  484. timezone = settings.TIME_ZONE
  485. if self.user and self.user.profile:
  486. timezone = self.user.profile.timezone
  487. self.timezone = timezone
  488. # Microseconds mess up Django's filtering, and we don't need be that specific
  489. if self.timestamp:
  490. self.timestamp = self.timestamp.replace(microsecond=0)
  491. if self.media_obj:
  492. self.media_type = self.MediaType(self.media_obj.__class__.__name__)
  493. return super(Scrobble, self).save(*args, **kwargs)
  494. def push_to_archivebox(self):
  495. pushable_media = hasattr(
  496. self.media_obj, "push_to_archivebox"
  497. ) and callable(self.media_obj.push_to_archivebox)
  498. if pushable_media and self.user.profile.archivebox_url:
  499. try:
  500. self.media_obj.push_to_archivebox(
  501. url=self.user.profile.archivebox_url,
  502. username=self.user.profile.archivebox_username,
  503. password=self.user.profile.archivebox_password,
  504. )
  505. except Exception:
  506. logger.info(
  507. "Failed to push URL to archivebox",
  508. extra={
  509. "archivebox_url": self.user.profile.archivebox_url,
  510. "archivebox_username": self.user.profile.archivebox_username,
  511. },
  512. )
  513. @property
  514. def logdata(self) -> Optional[logdata.JSONDataclass]:
  515. if not self.media_obj.logdata_cls:
  516. logger.warn(
  517. f"Media type has no log data class, you should add one!",
  518. extra={"media_type": self.media_type, "scrobble_id": self.id},
  519. )
  520. return None
  521. log_dict = self.log
  522. if isinstance(self.log, str):
  523. # There's nothing stopping django from saving a string ina JSONField :(
  524. logger.warning(
  525. "[scrobbles] Received string in JSON data in log",
  526. extra={"log": self.log},
  527. )
  528. log_dict = json.loads(self.log)
  529. return self.media_obj.logdata_cls.from_dict(log_dict)
  530. def redirect_url(self, user_id) -> str:
  531. user = User.objects.filter(id=user_id).first()
  532. redirect_url = self.media_obj.get_absolute_url()
  533. if (
  534. self.media_type == self.MediaType.WEBPAGE
  535. and user
  536. and user.profile.redirect_to_webpage
  537. ):
  538. logger.info(f"Redirecting to {self.media_obj.url}")
  539. redirect_url = self.media_obj.get_read_url()
  540. return redirect_url
    @property
    def tzinfo(self):
        """pytz timezone object for this scrobble's stored timezone name."""
        return pytz.timezone(self.timezone)
    @property
    def scrobble_media_key(self) -> str:
        """Foreign-key column name (e.g. "track_id") for this media type."""
        return media_class_to_foreign_key(self.media_type) + "_id"
  547. @property
  548. def status(self) -> str:
  549. if self.is_paused:
  550. return "paused"
  551. if self.played_to_completion:
  552. return "finished"
  553. if self.in_progress:
  554. return "in-progress"
  555. return "zombie"
  556. @property
  557. def is_stale(self) -> bool:
  558. """Mark scrobble as stale if it's been more than an hour since it was updated
  559. Effectively, this allows 'resuming' a video scrobble within an hour of starting it.
  560. """
  561. is_stale = False
  562. now = timezone.now()
  563. seconds_since_last_update = (now - self.modified).seconds
  564. if seconds_since_last_update >= self.media_obj.SECONDS_TO_STALE:
  565. is_stale = True
  566. return is_stale
  567. @property
  568. def previous(self) -> "Scrobble":
  569. return (
  570. self.media_obj.scrobble_set.order_by("-timestamp")
  571. .filter(timestamp__lt=self.timestamp)
  572. .first()
  573. )
  574. @property
  575. def next(self) -> "Scrobble":
  576. return (
  577. self.media_obj.scrobble_set.order_by("timestamp")
  578. .filter(timestamp__gt=self.timestamp)
  579. .first()
  580. )
  581. @property
  582. def previous_by_media(self) -> "Scrobble":
  583. return (
  584. Scrobble.objects.filter(
  585. media_type=self.media_type,
  586. user=self.user,
  587. timestamp__lt=self.timestamp,
  588. )
  589. .order_by("-timestamp")
  590. .first()
  591. )
  592. @property
  593. def next_by_media(self) -> "Scrobble":
  594. return (
  595. Scrobble.objects.filter(
  596. media_type=self.media_type,
  597. user=self.user,
  598. timestamp__gt=self.timestamp,
  599. )
  600. .order_by("-timestamp")
  601. .first()
  602. )
  603. @property
  604. def previous_by_user(self) -> "Scrobble":
  605. return (
  606. Scrobble.objects.order_by("-timestamp")
  607. .filter(timestamp__lt=self.timestamp)
  608. .first()
  609. )
  610. @property
  611. def next_by_user(self) -> "Scrobble":
  612. return (
  613. Scrobble.objects.order_by("-timestamp")
  614. .filter(timestamp__gt=self.timestamp)
  615. .first()
  616. )
  617. @property
  618. def session_pages_read(self) -> Optional[int]:
  619. if not self.log.get("pages_read"):
  620. return 0
  621. return self.log.get("pages_read")
    @property
    def is_long_play(self) -> bool:
        """True when the media class is listed in LONG_PLAY_MEDIA (multi-session media)."""
        return self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values()
  625. @property
  626. def percent_played(self) -> int:
  627. if not self.media_obj:
  628. return 0
  629. if self.media_obj and not self.media_obj.run_time_seconds:
  630. return 100
  631. if not self.playback_position_seconds and self.played_to_completion:
  632. return 100
  633. playback_seconds = self.playback_position_seconds
  634. if not playback_seconds:
  635. playback_seconds = (timezone.now() - self.timestamp).seconds
  636. run_time_secs = self.media_obj.run_time_seconds
  637. percent = int((playback_seconds / run_time_secs) * 100)
  638. if self.is_long_play:
  639. long_play_secs = 0
  640. if self.previous and not self.previous.long_play_complete:
  641. long_play_secs = self.previous.long_play_seconds or 0
  642. percent = int(
  643. ((playback_seconds + long_play_secs) / run_time_secs) * 100
  644. )
  645. return percent
    @property
    def probably_still_in_progress(self) -> bool:
        """Add our start time to our media run time to get when we expect to
        end. Audio tracks should be given a second or two of grace, videos should
        be given closer to 30 minutes, because the odds of watching it back to
        back are very slim.
        """
        is_in_progress = False
        # No padding configured for this media type means we never extend it
        padding_seconds = MEDIA_END_PADDING_SECONDS.get(self.media_type)
        if not padding_seconds:
            return is_in_progress

        # Without a media object we cannot know the run time
        if not self.media_obj:
            logger.info(
                "[scrobbling] scrobble has no media obj",
                extra={
                    "media_id": self.media_obj,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "probably_still_in_progress": is_in_progress,
                },
            )
            return is_in_progress

        if not self.media_obj.run_time_seconds:
            logger.info(
                "[scrobbling] media has no run time seconds, cannot calculate end",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "probably_still_in_progress": is_in_progress,
                },
            )
            return is_in_progress

        # Expected finish = start + run time, then pad by the per-type grace
        expected_end = self.timestamp + datetime.timedelta(
            seconds=self.media_obj.run_time_seconds
        )
        expected_end_padded = expected_end + datetime.timedelta(
            seconds=padding_seconds
        )
        # Take our start time, add our media length and an extra 30 min (1800s) is it still in the future? keep going
        is_in_progress = expected_end_padded > pendulum.now()
        logger.info(
            "[scrobbling] checking if we're probably still playing",
            extra={
                "media_id": self.media_obj.id,
                "scrobble_id": self.id,
                "media_type": self.media_type,
                "probably_still_in_progress": is_in_progress,
            },
        )
        return is_in_progress
  697. @property
  698. def can_be_updated(self) -> bool:
  699. if self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values():
  700. logger.info(
  701. "[scrobbling] cannot be updated, long play media",
  702. extra={
  703. "media_id": self.media_obj.id,
  704. "scrobble_id": self.id,
  705. "media_type": self.media_type,
  706. },
  707. )
  708. return False
  709. if self.percent_played >= 100 and not self.probably_still_in_progress:
  710. logger.info(
  711. "[scrobbling] cannot be updated, existing scrobble is 100% played",
  712. extra={
  713. "media_id": self.media_obj.id,
  714. "scrobble_id": self.id,
  715. "media_type": self.media_type,
  716. },
  717. )
  718. return False
  719. if self.is_stale:
  720. logger.info(
  721. "[scrobbling] cannot be udpated, stale",
  722. extra={
  723. "media_id": self.media_obj.id,
  724. "scrobble_id": self.id,
  725. "media_type": self.media_type,
  726. },
  727. )
  728. return False
  729. logger.info(
  730. "[scrobbling] can be updated",
  731. extra={
  732. "media_id": self.media_obj.id,
  733. "scrobble_id": self.id,
  734. "media_type": self.media_type,
  735. },
  736. )
  737. return True
  738. @property
  739. def media_obj(self):
  740. media_obj = None
  741. if self.video:
  742. media_obj = self.video
  743. if self.track:
  744. media_obj = self.track
  745. if self.podcast_episode:
  746. media_obj = self.podcast_episode
  747. if self.sport_event:
  748. media_obj = self.sport_event
  749. if self.book:
  750. media_obj = self.book
  751. if self.video_game:
  752. media_obj = self.video_game
  753. if self.board_game:
  754. media_obj = self.board_game
  755. if self.geo_location:
  756. media_obj = self.geo_location
  757. if self.web_page:
  758. media_obj = self.web_page
  759. if self.life_event:
  760. media_obj = self.life_event
  761. if self.mood:
  762. media_obj = self.mood
  763. if self.brickset:
  764. media_obj = self.brickset
  765. if self.trail:
  766. media_obj = self.trail
  767. if self.task:
  768. media_obj = self.task
  769. return media_obj
  770. def __str__(self):
  771. timestamp = self.timestamp.strftime("%Y-%m-%d")
  772. return f"Scrobble of {self.media_obj} ({timestamp})"
  773. def calc_reading_duration(self) -> int:
  774. duration = 0
  775. if self.logdata.page_data:
  776. for k, v in self.logdata.page_data.items():
  777. duration += v.get("duration")
  778. return duration
  779. def calc_pages_read(self) -> int:
  780. pages_read = 0
  781. if self.logdata.page_data:
  782. pages = [int(k) for k in self.logdata.page_data.keys()]
  783. pages.sort()
  784. if len(pages) == 1:
  785. pages_read = 1
  786. elif len(pages) >= 2:
  787. pages_read += pages[-1] - pages[0]
  788. else:
  789. pages_read = pages[-1] - pages[0]
  790. return pages_read
  791. @property
  792. def last_page_read(self) -> int:
  793. last_page = 0
  794. if self.logdata.page_data:
  795. pages = [int(k) for k in self.logdata.page_data.keys()]
  796. pages.sort()
  797. last_page = pages[-1]
  798. return last_page
  799. @classmethod
  800. def create_or_update(
  801. cls, media, user_id: int, scrobble_data: dict, **kwargs
  802. ) -> "Scrobble":
  803. key = media_class_to_foreign_key(media.__class__.__name__)
  804. media_query = models.Q(**{key: media})
  805. scrobble_data[key + "_id"] = media.id
  806. # Find our last scrobble of this media item (track, video, etc)
  807. scrobble = (
  808. cls.objects.filter(
  809. media_query,
  810. user_id=user_id,
  811. )
  812. .order_by("-timestamp")
  813. .first()
  814. )
  815. source = scrobble_data.get("source", "Vrobbler")
  816. mtype = media.__class__.__name__
  817. mopidy_status = scrobble_data.get("mopidy_status", None)
  818. # GeoLocations are a special case scrobble
  819. if mtype == cls.MediaType.GEO_LOCATION:
  820. logger.warning(
  821. f"[create_or_update] geoloc requires create_or_update_location"
  822. )
  823. scrobble = cls.create_or_update_location(
  824. media, scrobble_data, user_id
  825. )
  826. return scrobble
  827. logger.info(
  828. f"[create_or_update] check for existing scrobble to update ",
  829. extra={
  830. "scrobble_id": scrobble.id if scrobble else None,
  831. "media_type": mtype,
  832. "media_id": media.id,
  833. "scrobble_data": scrobble_data,
  834. },
  835. )
  836. scrobble_data["playback_status"] = scrobble_data.pop("status", None)
  837. # If it's marked as stopped, send it through our update mechanism, which will complete it
  838. if scrobble and (
  839. scrobble.can_be_updated
  840. or scrobble_data["playback_status"] == "stopped"
  841. ):
  842. if "log" in scrobble_data.keys() and scrobble.log:
  843. scrobble_data["log"] = scrobble.log | scrobble_data["log"]
  844. return scrobble.update(scrobble_data)
  845. # Discard status before creating
  846. scrobble_data.pop("playback_status")
  847. logger.info(
  848. f"[scrobbling] creating new scrobble",
  849. extra={
  850. "scrobble_id": scrobble.id if scrobble else None,
  851. "media_type": mtype,
  852. "media_id": media.id,
  853. "source": source,
  854. },
  855. )
  856. scrobble = cls.create(scrobble_data)
  857. return scrobble
    @classmethod
    def create_or_update_location(
        cls, location: GeoLocation, scrobble_data: dict, user_id: int
    ) -> "Scrobble":
        """Location is a special type: the user's current scrobble is always
        the active one, and we only finish it and move on if we get a new
        location that is far enough away (and far enough over the last three
        past scrobbles) to have actually moved.
        """
        key = media_class_to_foreign_key(location.__class__.__name__)
        scrobble_data[key + "_id"] = location.id
        # Most recent location scrobble at or before the incoming timestamp.
        scrobble = (
            cls.objects.filter(
                media_type=cls.MediaType.GEO_LOCATION,
                user_id=user_id,
                timestamp__lte=scrobble_data.get("timestamp"),
            )
            .order_by("-timestamp")
            .first()
        )
        logger.info(
            f"[scrobbling] fetching last location scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
            },
        )
        # No prior location scrobble for this user: just start a new one.
        if not scrobble:
            logger.info(
                f"[scrobbling] finished - no existing location scrobbles found",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                },
            )
            return cls.create(scrobble_data)
        # Same location as the active scrobble: keep it running.
        if scrobble.media_obj == location:
            logger.info(
                f"[scrobbling] finished - same location - not moved",
                extra={
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "media_id": location.id,
                    "scrobble_id": scrobble.id,
                    "scrobble_media_id": scrobble.media_obj.id,
                },
            )
            return scrobble
        # Different location object — ask the domain logic whether the user
        # actually moved far enough from the active scrobble's location.
        has_moved = location.has_moved(scrobble.media_obj)
        logger.info(
            f"[scrobbling] checking - has last location has moved?",
            extra={
                "scrobble_id": scrobble.id,
                "scrobble_media_id": scrobble.media_obj.id,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "has_moved": has_moved,
            },
        )
        if not has_moved:
            logger.info(
                f"[scrobbling] finished - not from old location - not moved",
                extra={
                    "scrobble_id": scrobble.id,
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media__id": scrobble.media_obj.id,
                },
            )
            return scrobble
        # Moved, but into the radius of an already-known *named* location:
        # keep the active scrobble and record the near-miss in its log.
        if existing_locations := location.in_proximity(named=True):
            existing_location = existing_locations.first()
            ts = int(pendulum.now().timestamp())
            scrobble.log[
                ts
            ] = f"Location {location.id} too close to this scrobble"
            scrobble.save(update_fields=["log"])
            logger.info(
                f"[scrobbling] finished - found existing named location",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media_id": existing_location.id,
                },
            )
            return scrobble
        # Genuinely somewhere new: close out the old scrobble and open a
        # fresh one for the new location.
        scrobble.stop(force_finish=True)
        scrobble = cls.create(scrobble_data)
        logger.info(
            f"[scrobbling] finished - created for location",
            extra={
                "scrobble_id": scrobble.id,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
                "media_type": cls.MediaType.GEO_LOCATION,
                "source": scrobble_data.get("source"),
            },
        )
        return scrobble
    def update(self, scrobble_data: dict) -> "Scrobble":
        """Apply *scrobble_data* to this scrobble, honoring any playback
        status transition (stopped/paused/resumed) it carries, then save
        and return self. Mutates *scrobble_data* (pops status/timestamp).
        """
        # Status is a field we get from Mopidy, which refuses to poll us
        playback_status = scrobble_data.pop("playback_status", None)
        logger.info(
            "[update] called",
            extra={
                "scrobble_id": self.id,
                "scrobble_data": scrobble_data,
                "media_type": self.media_type,
                "playback_status": playback_status,
            },
        )
        # Crossing the completion threshold forces a stop regardless of what
        # the client reported.
        if self.beyond_completion_percent:
            playback_status = "stopped"
        if playback_status == "stopped":
            self.stop()
        if playback_status == "paused":
            self.pause()
        if playback_status == "resumed":
            self.resume()
        # Anything other than a resume advances the stop timestamp, using
        # the incoming timestamp when provided, else "now".
        if playback_status != "resumed":
            scrobble_data["stop_timestamp"] = (
                scrobble_data.pop("timestamp", None) or timezone.now()
            )
        # timestamp should be more-or-less immutable
        scrobble_data.pop("timestamp", None)
        update_fields = []
        for key, value in scrobble_data.items():
            setattr(self, key, value)
            update_fields.append(key)
        self.save(update_fields=update_fields)
        return self
  990. @classmethod
  991. def create(
  992. cls,
  993. scrobble_data: dict,
  994. ) -> "Scrobble":
  995. scrobble = cls.objects.create(
  996. **scrobble_data,
  997. )
  998. return scrobble
  999. def stop(self, force_finish=False) -> None:
  1000. self.stop_timestamp = timezone.now()
  1001. self.played_to_completion = True
  1002. self.in_progress = False
  1003. if not self.playback_position_seconds:
  1004. self.playback_position_seconds = int(
  1005. (self.stop_timestamp - self.timestamp).total_seconds()
  1006. )
  1007. self.save(
  1008. update_fields=[
  1009. "in_progress",
  1010. "played_to_completion",
  1011. "stop_timestamp",
  1012. "playback_position_seconds",
  1013. ]
  1014. )
  1015. class_name = self.media_obj.__class__.__name__
  1016. if class_name in LONG_PLAY_MEDIA.values():
  1017. self.finish_long_play()
  1018. logger.info(
  1019. f"[scrobbling] stopped",
  1020. extra={
  1021. "scrobble_id": self.id,
  1022. "media_id": self.media_obj.id,
  1023. "media_type": self.media_type,
  1024. "source": self.source,
  1025. },
  1026. )
  1027. def pause(self) -> None:
  1028. if self.is_paused:
  1029. logger.warning(f"{self.id} - already paused - {self.source}")
  1030. return
  1031. self.is_paused = True
  1032. self.save(update_fields=["is_paused"])
  1033. logger.info(
  1034. f"[scrobbling] paused",
  1035. extra={
  1036. "scrobble_id": self.id,
  1037. "media_type": self.media_type,
  1038. "source": self.source,
  1039. },
  1040. )
  1041. def resume(self) -> None:
  1042. if self.is_paused or not self.in_progress:
  1043. self.is_paused = False
  1044. self.in_progress = True
  1045. self.save(update_fields=["is_paused", "in_progress"])
  1046. logger.info(
  1047. f"[scrobbling] resumed",
  1048. extra={
  1049. "scrobble_id": self.id,
  1050. "media_type": self.media_type,
  1051. "source": self.source,
  1052. },
  1053. )
    def cancel(self) -> None:
        """Throw this scrobble away entirely (hard delete, never completed)."""
        self.delete()
  1056. def update_ticks(self, data) -> None:
  1057. self.playback_position_seconds = data.get("playback_position_seconds")
  1058. self.save(update_fields=["playback_position_seconds"])
  1059. def finish_long_play(self):
  1060. seconds_elapsed = (timezone.now() - self.timestamp).seconds
  1061. past_seconds = 0
  1062. # Set our playback seconds, and calc long play seconds
  1063. self.playback_position_seconds = seconds_elapsed
  1064. if self.previous:
  1065. past_seconds = self.previous.long_play_seconds
  1066. self.long_play_seconds = past_seconds + seconds_elapsed
  1067. # Long play scrobbles are always finished when we say they are
  1068. self.played_to_completion = True
  1069. self.save(
  1070. update_fields=[
  1071. "playback_position_seconds",
  1072. "played_to_completion",
  1073. "long_play_seconds",
  1074. ]
  1075. )
  1076. logger.info(
  1077. f"[scrobbling] finishing long play",
  1078. extra={
  1079. "scrobble_id": self.id,
  1080. },
  1081. )
  1082. @property
  1083. def beyond_completion_percent(self) -> bool:
  1084. """Returns true if our media is beyond our completion percent, unless
  1085. our type is geolocation in which case we always return false
  1086. """
  1087. beyond_completion = (
  1088. self.percent_played >= self.media_obj.COMPLETION_PERCENT
  1089. )
  1090. if self.media_type == "GeoLocation":
  1091. logger.info(
  1092. f"[scrobbling] locations are ONLY completed when new one is created",
  1093. extra={
  1094. "scrobble_id": self.id,
  1095. "media_type": self.media_type,
  1096. "beyond_completion": beyond_completion,
  1097. },
  1098. )
  1099. beyond_completion = False
  1100. return beyond_completion