models.py
import calendar
import datetime
import json
import logging
from typing import Optional
from uuid import uuid4

import pendulum
import pytz
import requests
from beers.models import Beer
from boardgames.models import BoardGame
from books.koreader import process_koreader_sqlite_file
from books.models import Book
from bricksets.models import BrickSet
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django_extensions.db.models import TimeStampedModel
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFit
from lifeevents.models import LifeEvent
from locations.models import GeoLocation
from moods.models import Mood
from music.lastfm import LastFM
from music.models import Artist, Track
from podcasts.models import PodcastEpisode
from profiles.models import UserProfile
from profiles.utils import (
    end_of_day,
    end_of_month,
    end_of_week,
    start_of_day,
    start_of_month,
    start_of_week,
)
from scrobbles import dataclasses as logdata
from scrobbles.constants import LONG_PLAY_MEDIA
from scrobbles.stats import build_charts
from scrobbles.utils import media_class_to_foreign_key
from sports.models import SportEvent
from tasks.models import Task
from trails.models import Trail
from videogames import retroarch
from videogames.models import VideoGame
from videos.models import Series, Video
from webpages.models import WebPage
from scrobbles.constants import MEDIA_END_PADDING_SECONDS
logger = logging.getLogger(__name__)
User = get_user_model()

BNULL = {"blank": True, "null": True}

POINTS_FOR_MOVEMENT_HISTORY = int(
    getattr(settings, "POINTS_FOR_MOVEMENT_HISTORY", 3)
)


class BaseFileImportMixin(TimeStampedModel):
    user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
    uuid = models.UUIDField(editable=False, default=uuid4)
    processing_started = models.DateTimeField(**BNULL)
    processed_finished = models.DateTimeField(**BNULL)
    process_log = models.TextField(**BNULL)
    process_count = models.IntegerField(**BNULL)

    class Meta:
        abstract = True

    def __str__(self):
        return f"{self.import_type} import on {self.human_start}"

    @property
    def human_start(self):
        start = "Unknown"
        if self.processing_started:
            start = self.processing_started.strftime("%B %d, %Y at %H:%M")
        return start

    @property
    def import_type(self) -> str:
        return "Unknown Import Source"

    def process(self, force=False):
        logger.warning("Process not implemented")

    def undo(self, dryrun=False):
        """Accepts the log from a scrobble import and removes the scrobbles"""
        from scrobbles.models import Scrobble

        if not self.process_log:
            logger.warning("No lines in process log found to undo")
            return

        for line in self.process_log.split("\n"):
            scrobble_id = line.split("\t")[0]
            scrobble = Scrobble.objects.filter(id=scrobble_id).first()
            if not scrobble:
                logger.warning(
                    f"Could not find scrobble {scrobble_id} to undo"
                )
                continue
            logger.info(f"Removing scrobble {scrobble_id}")
            if not dryrun:
                scrobble.delete()

        self.processed_finished = None
        self.processing_started = None
        self.process_count = None
        self.process_log = ""
        self.save(
            update_fields=[
                "processed_finished",
                "processing_started",
                "process_log",
                "process_count",
            ]
        )

    def scrobbles(self) -> models.QuerySet:
        scrobble_ids = []
        if self.process_log:
            for line in self.process_log.split("\n"):
                sid = line.split("\t")[0]
                if sid:
                    scrobble_ids.append(sid)
        return Scrobble.objects.filter(id__in=scrobble_ids)

    def mark_started(self):
        self.processing_started = timezone.now()
        self.save(update_fields=["processing_started"])

    def mark_finished(self):
        self.processed_finished = timezone.now()
        self.save(update_fields=["processed_finished"])

    def record_log(self, scrobbles):
        self.process_log = ""
        if not scrobbles:
            self.process_count = 0
            self.save(update_fields=["process_log", "process_count"])
            return

        for count, scrobble in enumerate(scrobbles):
            scrobble_str = f"{scrobble.id}\t{scrobble.timestamp}\t{scrobble.media_obj.title}"
            log_line = f"{scrobble_str}"
            if count > 0:
                log_line = "\n" + log_line
            self.process_log += log_line
        self.process_count = len(scrobbles)
        self.save(update_fields=["process_log", "process_count"])

    @property
    def upload_file_path(self):
        raise NotImplementedError


class KoReaderImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "KOReader Import"

    @property
    def import_type(self) -> str:
        return "KOReader"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:koreader-import-detail", kwargs={"slug": self.uuid}
        )

    def get_path(instance, filename):
        extension = filename.split(".")[-1]
        uuid = instance.uuid
        return f"koreader-uploads/{uuid}.{extension}"

    @property
    def upload_file_path(self) -> str:
        if getattr(settings, "USE_S3_STORAGE"):
            path = self.sqlite_file.url
        else:
            path = self.sqlite_file.path
        return path

    sqlite_file = models.FileField(upload_to=get_path, **BNULL)

    def process(self, force=False):
        if self.processed_finished and not force:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        self.mark_started()
        scrobbles = process_koreader_sqlite_file(
            self.upload_file_path, self.user.id
        )
        self.record_log(scrobbles)
        self.mark_finished()
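

# Illustrative sketch, not part of the original module: the file-import
# lifecycle these classes share (mark_started -> record_log -> mark_finished,
# with undo() replaying the tab-separated process_log). The function name and
# the idea of driving it from a shell or task are assumptions for illustration.
def _example_drive_koreader_import(import_id: int, dry_run: bool = True) -> None:
    ko_import = KoReaderImport.objects.get(id=import_id)
    ko_import.process()  # no-op if processed_finished is already set and force=False
    logger.info(
        "Imported %s scrobbles on %s",
        ko_import.process_count,
        ko_import.human_start,
    )
    # undo() walks process_log line by line and deletes each scrobble id;
    # dryrun=True only logs what it would remove.
    ko_import.undo(dryrun=dry_run)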


class AudioScrobblerTSVImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "AudioScrobbler TSV Import"

    @property
    def import_type(self) -> str:
        return "AudioScrobbler"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:tsv-import-detail", kwargs={"slug": self.uuid}
        )

    def get_path(instance, filename):
        extension = filename.split(".")[-1]
        uuid = instance.uuid
        return f"audioscrobbler-uploads/{uuid}.{extension}"

    @property
    def upload_file_path(self):
        if getattr(settings, "USE_S3_STORAGE"):
            path = self.tsv_file.url
        else:
            path = self.tsv_file.path
        return path

    tsv_file = models.FileField(upload_to=get_path, **BNULL)

    def process(self, force=False):
        from scrobbles.tsv import process_audioscrobbler_tsv_file

        if self.processed_finished and not force:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        self.mark_started()
        tz = None
        user_id = None
        if self.user:
            user_id = self.user.id
            tz = self.user.profile.tzinfo
        scrobbles = process_audioscrobbler_tsv_file(
            self.upload_file_path, user_id, user_tz=tz
        )
        self.record_log(scrobbles)
        self.mark_finished()


class LastFmImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "Last.FM Import"

    @property
    def import_type(self) -> str:
        return "LastFM"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:lastfm-import-detail", kwargs={"slug": self.uuid}
        )

    def process(self, import_all=False):
        """Import scrobbles found on LastFM"""
        if self.processed_finished:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        last_import = None
        if not import_all:
            try:
                last_import = LastFmImport.objects.exclude(id=self.id).last()
            except Exception:
                pass

        if not import_all and not last_import:
            logger.warning(
                "No previous import; to import all Last.fm scrobbles, pass import_all=True"
            )
            return

        lastfm = LastFM(self.user)

        last_processed = None
        if last_import:
            last_processed = last_import.processed_finished

        self.mark_started()
        scrobbles = lastfm.import_from_lastfm(last_processed)
        self.record_log(scrobbles)
        self.mark_finished()


class RetroarchImport(BaseFileImportMixin):
    class Meta:
        verbose_name = "Retroarch Import"

    @property
    def import_type(self) -> str:
        return "Retroarch"

    def get_absolute_url(self):
        return reverse(
            "scrobbles:retroarch-import-detail", kwargs={"slug": self.uuid}
        )

    def process(self, import_all=False, force=False):
        """Import scrobbles found on Retroarch"""
        if self.processed_finished and not force:
            logger.info(
                f"{self} already processed on {self.processed_finished}"
            )
            return

        if force:
            logger.info("You told me to force import from Retroarch")

        if not self.user.profile.retroarch_path:
            logger.info(
                "Trying to import Retroarch logs, but user has no retroarch_path configured"
            )

        self.mark_started()
        scrobbles = retroarch.import_retroarch_lrtl_files(
            self.user.profile.retroarch_path,
            self.user.id,
        )
        self.record_log(scrobbles)
        self.mark_finished()


class ChartRecord(TimeStampedModel):
    """Sort of like a materialized view of what we could generate dynamically,
    but that would kill the DB as it gets larger. Collects time-based records
    generated by a cron-like archival job.

    1972 by Josh Rouse - #3 in 2023, January
    """

    user = models.ForeignKey(User, on_delete=models.DO_NOTHING, **BNULL)
    rank = models.IntegerField(db_index=True)
    count = models.IntegerField(default=0)
    year = models.IntegerField(**BNULL)
    month = models.IntegerField(**BNULL)
    week = models.IntegerField(**BNULL)
    day = models.IntegerField(**BNULL)
    video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
    series = models.ForeignKey(Series, on_delete=models.DO_NOTHING, **BNULL)
    artist = models.ForeignKey(Artist, on_delete=models.DO_NOTHING, **BNULL)
    track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
    period_start = models.DateTimeField(**BNULL)
    period_end = models.DateTimeField(**BNULL)

    def save(self, *args, **kwargs):
        profile = self.user.profile
        if self.week:
            # Set start and end to the start and end of the ISO week
            period = datetime.date.fromisocalendar(self.year, self.week, 1)
            self.period_start = start_of_week(period, profile)
            self.period_end = end_of_week(period, profile)
        if self.day:
            period = datetime.datetime(self.year, self.month, self.day)
            self.period_start = start_of_day(period, profile)
            self.period_end = end_of_day(period, profile)
        if self.month and not self.day:
            period = datetime.datetime(self.year, self.month, 1)
            self.period_start = start_of_month(period, profile)
            self.period_end = end_of_month(period, profile)
        super(ChartRecord, self).save(*args, **kwargs)

    @property
    def media_obj(self):
        media_obj = None
        if self.video:
            media_obj = self.video
        if self.track:
            media_obj = self.track
        if self.artist:
            media_obj = self.artist
        return media_obj

    @property
    def month_str(self) -> str:
        month_str = ""
        if self.month:
            month_str = calendar.month_name[self.month]
        return month_str

    @property
    def day_str(self) -> str:
        day_str = ""
        if self.day:
            day_str = str(self.day)
        return day_str

    @property
    def week_str(self) -> str:
        week_str = ""
        if self.week:
            week_str = str(self.week)
        return "Week " + week_str

    @property
    def period(self) -> str:
        period = str(self.year)
        if self.month:
            period = " ".join([self.month_str, period])
        if self.week:
            period = " ".join([self.week_str, period])
        if self.day:
            period = " ".join([self.day_str, period])
        return period

    @property
    def period_type(self) -> str:
        period = "year"
        if self.month:
            period = "month"
        if self.week:
            period = "week"
        if self.day:
            period = "day"
        return period

    def __str__(self):
        title = f"#{self.rank} in {self.period}"
        if self.day or self.week:
            title = f"#{self.rank} on {self.period}"
        return title

    def link(self):
        get_params = f"?date={self.year}"
        if self.week:
            get_params = get_params + f"-W{self.week}"
        if self.month:
            get_params = get_params + f"-{self.month}"
        if self.day:
            get_params = get_params + f"-{self.day}"
        if self.artist:
            get_params = get_params + "&media=Artist"
        return reverse("scrobbles:charts-home") + get_params

    @classmethod
    def build(cls, user, **kwargs):
        build_charts(user=user, **kwargs)

    @classmethod
    def for_year(cls, user, year):
        return cls.objects.filter(year=year, user=user)

    @classmethod
    def for_month(cls, user, year, month):
        return cls.objects.filter(year=year, month=month, user=user)

    @classmethod
    def for_day(cls, user, year, day, month):
        return cls.objects.filter(year=year, month=month, day=day, user=user)

    @classmethod
    def for_week(cls, user, year, week):
        return cls.objects.filter(year=year, week=week, user=user)
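

# Illustrative sketch, not part of the original module: how chart records are
# typically built (by the cron-like job the docstring mentions) and then read
# back. Passing year/month keyword arguments through to build_charts is an
# assumption for illustration; see scrobbles.stats.build_charts for the real
# signature.
def _example_monthly_chart(user, year: int = 2023, month: int = 1) -> None:
    ChartRecord.build(user, year=year, month=month)
    for record in ChartRecord.for_month(user, year, month).order_by("rank"):
        logger.info(
            "#%s: %s (%s plays in %s)",
            record.rank,
            record.media_obj,
            record.count,
            record.period,
        )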


class Scrobble(TimeStampedModel):
    """A scrobble tracks played media items by a user."""

    class MediaType(models.TextChoices):
        """Enum mapping a media model type to a string"""

        VIDEO = "Video", "Video"
        TRACK = "Track", "Track"
        PODCAST_EPISODE = "PodcastEpisode", "Podcast episode"
        SPORT_EVENT = "SportEvent", "Sport event"
        BOOK = "Book", "Book"
        VIDEO_GAME = "VideoGame", "Video game"
        BOARD_GAME = "BoardGame", "Board game"
        GEO_LOCATION = "GeoLocation", "GeoLocation"
        TRAIL = "Trail", "Trail"
        BEER = "Beer", "Beer"
        TASK = "Task", "Task"
        WEBPAGE = "WebPage", "Web Page"
        LIFE_EVENT = "LifeEvent", "Life event"
        MOOD = "Mood", "Mood"
        BRICKSET = "BrickSet", "Brick set"

    uuid = models.UUIDField(editable=False, **BNULL)
    video = models.ForeignKey(Video, on_delete=models.DO_NOTHING, **BNULL)
    track = models.ForeignKey(Track, on_delete=models.DO_NOTHING, **BNULL)
    podcast_episode = models.ForeignKey(
        PodcastEpisode, on_delete=models.DO_NOTHING, **BNULL
    )
    sport_event = models.ForeignKey(
        SportEvent, on_delete=models.DO_NOTHING, **BNULL
    )
    book = models.ForeignKey(Book, on_delete=models.DO_NOTHING, **BNULL)
    video_game = models.ForeignKey(
        VideoGame, on_delete=models.DO_NOTHING, **BNULL
    )
    board_game = models.ForeignKey(
        BoardGame, on_delete=models.DO_NOTHING, **BNULL
    )
    geo_location = models.ForeignKey(
        GeoLocation, on_delete=models.DO_NOTHING, **BNULL
    )
    beer = models.ForeignKey(Beer, on_delete=models.DO_NOTHING, **BNULL)
    trail = models.ForeignKey(Trail, on_delete=models.DO_NOTHING, **BNULL)
    task = models.ForeignKey(Task, on_delete=models.DO_NOTHING, **BNULL)
    web_page = models.ForeignKey(WebPage, on_delete=models.DO_NOTHING, **BNULL)
    life_event = models.ForeignKey(
        LifeEvent, on_delete=models.DO_NOTHING, **BNULL
    )
    mood = models.ForeignKey(Mood, on_delete=models.DO_NOTHING, **BNULL)
    brickset = models.ForeignKey(
        BrickSet, on_delete=models.DO_NOTHING, **BNULL
    )
    media_type = models.CharField(
        max_length=14, choices=MediaType.choices, default=MediaType.VIDEO
    )
    user = models.ForeignKey(
        User, blank=True, null=True, on_delete=models.DO_NOTHING
    )

    # Time keeping
    timestamp = models.DateTimeField(**BNULL)
    stop_timestamp = models.DateTimeField(**BNULL)
    playback_position_seconds = models.IntegerField(**BNULL)

    # Status indicators
    is_paused = models.BooleanField(default=False)
    played_to_completion = models.BooleanField(default=False)
    in_progress = models.BooleanField(default=True)

    # Metadata
    source = models.CharField(max_length=255, **BNULL)
    log = models.JSONField(
        **BNULL,
        default=dict,
        encoder=logdata.ScrobbleLogDataEncoder,
        decoder=logdata.ScrobbleLogDataDecoder,
    )
    timezone = models.CharField(max_length=50, **BNULL)

    # Fields for keeping track of video game data
    videogame_save_data = models.FileField(
        upload_to="scrobbles/videogame_save_data/", **BNULL
    )
    gpx_file = models.FileField(upload_to="scrobbles/gpx_file/", **BNULL)
    screenshot = models.ImageField(
        upload_to="scrobbles/videogame_screenshot/", **BNULL
    )
    screenshot_small = ImageSpecField(
        source="screenshot",
        processors=[ResizeToFit(100, 100)],
        format="JPEG",
        options={"quality": 60},
    )
    screenshot_medium = ImageSpecField(
        source="screenshot",
        processors=[ResizeToFit(300, 300)],
        format="JPEG",
        options={"quality": 75},
    )
    long_play_seconds = models.BigIntegerField(**BNULL)
    long_play_complete = models.BooleanField(**BNULL)

    @property
    def last_serial_scrobble(self) -> Optional["Scrobble"]:
        from scrobbles.models import Scrobble

        if self.logdata and self.logdata.serial_scrobble_id:
            return Scrobble.objects.filter(
                id=self.logdata.serial_scrobble_id
            ).first()

    def save(self, *args, **kwargs):
        if not self.uuid:
            self.uuid = uuid4()

        if not self.timezone:
            timezone = settings.TIME_ZONE
            if self.user and self.user.profile:
                timezone = self.user.profile.timezone
            self.timezone = timezone

        # Microseconds mess up Django's filtering, and we don't need to be that specific
        if self.timestamp:
            self.timestamp = self.timestamp.replace(microsecond=0)

        if self.media_obj:
            self.media_type = self.MediaType(self.media_obj.__class__.__name__)

        return super(Scrobble, self).save(*args, **kwargs)

    def push_to_archivebox(self):
        pushable_media = hasattr(
            self.media_obj, "push_to_archivebox"
        ) and callable(self.media_obj.push_to_archivebox)

        if pushable_media and self.user.profile.archivebox_url:
            try:
                self.media_obj.push_to_archivebox(
                    url=self.user.profile.archivebox_url,
                    username=self.user.profile.archivebox_username,
                    password=self.user.profile.archivebox_password,
                )
            except Exception:
                logger.info(
                    "Failed to push URL to archivebox",
                    extra={
                        "archivebox_url": self.user.profile.archivebox_url,
                        "archivebox_username": self.user.profile.archivebox_username,
                    },
                )

    @property
    def logdata(self) -> Optional[logdata.JSONDataclass]:
        if self.media_obj:
            logdata_cls = self.media_obj.logdata_cls
        else:
            logdata_cls = logdata.ScrobbleLogData

        log_dict = self.log
        if isinstance(self.log, str):
            # There's nothing stopping Django from saving a string in a JSONField :(
            logger.warning(
                "[scrobbles] Received string in JSON data in log",
                extra={"log": self.log},
            )
            log_dict = json.loads(self.log)
        if not log_dict:
            log_dict = {}
        return logdata_cls.from_dict(log_dict)
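
    # Illustrative note, not from the original source: the keys this module
    # itself reads from the log JSONField include "pages_read" (see
    # session_pages_read below) and "serial_scrobble_id" (see
    # last_serial_scrobble above), so a stored value might look like
    # {"pages_read": 12, "serial_scrobble_id": 1234} before being hydrated
    # into the media-specific dataclass by logdata_cls.from_dict().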

    def redirect_url(self, user_id) -> str:
        user = User.objects.filter(id=user_id).first()
        redirect_url = self.media_obj.get_absolute_url()
        if (
            self.media_type == self.MediaType.WEBPAGE
            and user
            and user.profile.redirect_to_webpage
        ):
            logger.info(f"Redirecting to {self.media_obj.url}")
            redirect_url = self.media_obj.get_read_url()
        return redirect_url

    @property
    def tzinfo(self):
        return pytz.timezone(self.timezone)

    @property
    def scrobble_media_key(self) -> str:
        return media_class_to_foreign_key(self.media_type) + "_id"

    @property
    def status(self) -> str:
        if self.is_paused:
            return "paused"
        if self.played_to_completion:
            return "finished"
        if self.in_progress:
            return "in-progress"
        return "zombie"

    @property
    def is_stale(self) -> bool:
        """Mark a scrobble as stale if it's been longer than the media's
        SECONDS_TO_STALE since it was last updated.

        Effectively, this allows 'resuming' a video scrobble within an hour of
        starting it.
        """
        is_stale = False
        now = timezone.now()
        seconds_since_last_update = (now - self.modified).seconds
        if seconds_since_last_update >= self.media_obj.SECONDS_TO_STALE:
            is_stale = True
        return is_stale

    @property
    def previous(self) -> "Scrobble":
        return (
            self.media_obj.scrobble_set.order_by("-timestamp")
            .filter(timestamp__lt=self.timestamp)
            .first()
        )

    @property
    def next(self) -> "Scrobble":
        return (
            self.media_obj.scrobble_set.order_by("timestamp")
            .filter(timestamp__gt=self.timestamp)
            .first()
        )

    @property
    def previous_by_media(self) -> "Scrobble":
        return (
            Scrobble.objects.filter(
                media_type=self.media_type,
                user=self.user,
                timestamp__lt=self.timestamp,
            )
            .order_by("-timestamp")
            .first()
        )

    @property
    def next_by_media(self) -> "Scrobble":
        return (
            Scrobble.objects.filter(
                media_type=self.media_type,
                user=self.user,
                timestamp__gt=self.timestamp,
            )
            # Ascending order so we get the next scrobble, not the latest one
            .order_by("timestamp")
            .first()
        )

    @property
    def previous_by_user(self) -> "Scrobble":
        return (
            Scrobble.objects.order_by("-timestamp")
            .filter(timestamp__lt=self.timestamp)
            .first()
        )

    @property
    def next_by_user(self) -> "Scrobble":
        return (
            # Ascending order so we get the next scrobble, not the latest one
            Scrobble.objects.order_by("timestamp")
            .filter(timestamp__gt=self.timestamp)
            .first()
        )

    @property
    def session_pages_read(self) -> Optional[int]:
        if not self.log.get("pages_read"):
            return 0
        return self.log.get("pages_read")

    @property
    def is_long_play(self) -> bool:
        return self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values()

    @property
    def percent_played(self) -> int:
        if not self.media_obj:
            return 0

        if self.media_obj and not self.media_obj.run_time_seconds:
            return 100

        if not self.playback_position_seconds and self.played_to_completion:
            return 100

        playback_seconds = self.playback_position_seconds
        if not playback_seconds:
            playback_seconds = (timezone.now() - self.timestamp).seconds

        run_time_secs = self.media_obj.run_time_seconds
        percent = int((playback_seconds / run_time_secs) * 100)

        if self.is_long_play:
            long_play_secs = 0
            if self.previous and not self.previous.long_play_complete:
                long_play_secs = self.previous.long_play_seconds or 0
            percent = int(
                ((playback_seconds + long_play_secs) / run_time_secs) * 100
            )

        return percent
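
    # Worked example (illustrative, not from the original source): for a
    # long-play item with run_time_seconds=36_000, a session with
    # playback_position_seconds=1_800 and a previous unfinished scrobble
    # carrying long_play_seconds=7_200 gives
    # int(((1_800 + 7_200) / 36_000) * 100) == 25 percent played.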

    @property
    def probably_still_in_progress(self) -> bool:
        """Add our start time to our media run time to get when we expect the
        media to end. Audio tracks should be given a second or two of grace,
        while videos should be given closer to 30 minutes, because the odds of
        watching something back to back are very slim.
        """
        is_in_progress = False

        padding_seconds = MEDIA_END_PADDING_SECONDS.get(self.media_type)
        if not padding_seconds:
            return is_in_progress

        if not self.media_obj:
            logger.info(
                "[scrobbling] scrobble has no media obj",
                extra={
                    "media_id": self.media_obj,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "probably_still_in_progress": is_in_progress,
                },
            )
            return is_in_progress

        if not self.media_obj.run_time_seconds:
            logger.info(
                "[scrobbling] media has no run time seconds, cannot calculate end",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "probably_still_in_progress": is_in_progress,
                },
            )
            return is_in_progress

        expected_end = self.timestamp + datetime.timedelta(
            seconds=self.media_obj.run_time_seconds
        )
        expected_end_padded = expected_end + datetime.timedelta(
            seconds=padding_seconds
        )
        # Take our start time, add our media length plus the padding; if that
        # is still in the future, we're probably still playing.
        is_in_progress = expected_end_padded > pendulum.now()

        logger.info(
            "[scrobbling] checking if we're probably still playing",
            extra={
                "media_id": self.media_obj.id,
                "scrobble_id": self.id,
                "media_type": self.media_type,
                "probably_still_in_progress": is_in_progress,
            },
        )
        return is_in_progress
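
    # Illustrative note, not from the original source: with a 3,600-second
    # video started at 20:00 and a hypothetical 1,800-second entry for its
    # media type in MEDIA_END_PADDING_SECONDS, expected_end_padded lands at
    # 21:30, so the scrobble reads as probably still in progress until then.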

    @property
    def can_be_updated(self) -> bool:
        if self.media_obj.__class__.__name__ in LONG_PLAY_MEDIA.values():
            logger.info(
                "[scrobbling] cannot be updated, long play media",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                },
            )
            return False

        if self.percent_played >= 100 and not self.probably_still_in_progress:
            logger.info(
                "[scrobbling] cannot be updated, existing scrobble is 100% played",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                },
            )
            return False

        if self.is_stale:
            logger.info(
                "[scrobbling] cannot be updated, stale",
                extra={
                    "media_id": self.media_obj.id,
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                },
            )
            return False

        logger.info(
            "[scrobbling] can be updated",
            extra={
                "media_id": self.media_obj.id,
                "scrobble_id": self.id,
                "media_type": self.media_type,
            },
        )
        return True

    @property
    def media_obj(self):
        media_obj = None
        if self.video:
            media_obj = self.video
        if self.track:
            media_obj = self.track
        if self.podcast_episode:
            media_obj = self.podcast_episode
        if self.sport_event:
            media_obj = self.sport_event
        if self.book:
            media_obj = self.book
        if self.video_game:
            media_obj = self.video_game
        if self.board_game:
            media_obj = self.board_game
        if self.geo_location:
            media_obj = self.geo_location
        if self.web_page:
            media_obj = self.web_page
        if self.life_event:
            media_obj = self.life_event
        if self.mood:
            media_obj = self.mood
        if self.brickset:
            media_obj = self.brickset
        if self.trail:
            media_obj = self.trail
        if self.beer:
            media_obj = self.beer
        if self.task:
            media_obj = self.task
        return media_obj

    def __str__(self):
        timestamp = self.timestamp.strftime("%Y-%m-%d")
        return f"Scrobble of {self.media_obj} ({timestamp})"

    def calc_reading_duration(self) -> int:
        duration = 0
        if self.logdata.page_data:
            for k, v in self.logdata.page_data.items():
                duration += v.get("duration")
        return duration

    def calc_pages_read(self) -> int:
        pages_read = 0
        if self.logdata.page_data:
            pages = sorted(int(k) for k in self.logdata.page_data.keys())
            if len(pages) == 1:
                pages_read = 1
            else:
                pages_read = pages[-1] - pages[0]
        return pages_read

    @property
    def last_page_read(self) -> int:
        last_page = 0
        if self.logdata.page_data:
            pages = sorted(int(k) for k in self.logdata.page_data.keys())
            last_page = pages[-1]
        return last_page

    @property
    def get_media_source_url(self) -> str:
        url = ""
        if self.media_type == self.MediaType.WEBPAGE:
            url = self.media_obj.url
        if self.media_type == self.MediaType.TASK:
            url = self.media_obj.source_url_for_user(self.user)
        return url

    @classmethod
    def create_or_update(
        cls, media, user_id: int, scrobble_data: dict, **kwargs
    ) -> "Scrobble":
        key = media_class_to_foreign_key(media.__class__.__name__)
        media_query = models.Q(**{key: media})
        scrobble_data[key + "_id"] = media.id

        # Find our last scrobble of this media item (track, video, etc)
        scrobble = (
            cls.objects.filter(
                media_query,
                user_id=user_id,
            )
            .order_by("-timestamp")
            .first()
        )
        source = scrobble_data.get("source", "Vrobbler")
        mtype = media.__class__.__name__
        mopidy_status = scrobble_data.get("mopidy_status", None)

        # GeoLocations are a special case scrobble
        if mtype == cls.MediaType.GEO_LOCATION:
            logger.warning(
                "[create_or_update] geoloc requires create_or_update_location"
            )
            scrobble = cls.create_or_update_location(
                media, scrobble_data, user_id
            )
            return scrobble

        logger.info(
            "[create_or_update] check for existing scrobble to update",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": mtype,
                "media_id": media.id,
                "scrobble_data": scrobble_data,
            },
        )

        scrobble_data["playback_status"] = scrobble_data.pop("status", None)
        # If it's marked as stopped, send it through our update mechanism, which will complete it
        if scrobble and (
            scrobble.can_be_updated
            or scrobble_data["playback_status"] == "stopped"
        ):
            if "log" in scrobble_data and scrobble.log:
                scrobble_data["log"] = scrobble.log | scrobble_data["log"]
            return scrobble.update(scrobble_data)

        # Discard status before creating
        scrobble_data.pop("playback_status")
        logger.info(
            "[scrobbling] creating new scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": mtype,
                "media_id": media.id,
                "source": source,
            },
        )
        scrobble = cls.create(scrobble_data)
        return scrobble
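
    # Illustrative payload sketch, not from the original source. The keys below
    # are the ones this method and create() actually read: "status" is popped
    # into playback_status, "user_id" is required by create(), "source" falls
    # back to "Vrobbler", and the media foreign key is filled in from `media`:
    #
    #     Scrobble.create_or_update(
    #         track,
    #         user.id,
    #         {
    #             "user_id": user.id,
    #             "timestamp": timezone.now(),
    #             "source": "Vrobbler",
    #             "status": "started",  # only stopped/paused/resumed change state
    #         },
    #     )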

    @classmethod
    def create_or_update_location(
        cls, location: GeoLocation, scrobble_data: dict, user_id: int
    ) -> "Scrobble":
        """Locations are a special type, where the current scrobble for a user
        is always the active scrobble, and we only finish it and move on if we
        get a new location that is far enough away (and far enough away over
        the last three scrobbles) to count as having actually moved.
        """
        key = media_class_to_foreign_key(location.__class__.__name__)
        scrobble_data[key + "_id"] = location.id

        scrobble = (
            cls.objects.filter(
                media_type=cls.MediaType.GEO_LOCATION,
                user_id=user_id,
                timestamp__lte=scrobble_data.get("timestamp"),
            )
            .order_by("-timestamp")
            .first()
        )

        logger.info(
            "[scrobbling] fetching last location scrobble",
            extra={
                "scrobble_id": scrobble.id if scrobble else None,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
            },
        )

        if not scrobble:
            logger.info(
                "[scrobbling] finished - no existing location scrobbles found",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                },
            )
            return cls.create(scrobble_data)

        if scrobble.media_obj == location:
            logger.info(
                "[scrobbling] finished - same location - not moved",
                extra={
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "media_id": location.id,
                    "scrobble_id": scrobble.id,
                    "scrobble_media_id": scrobble.media_obj.id,
                },
            )
            return scrobble

        has_moved = location.has_moved(scrobble.media_obj)
        logger.info(
            "[scrobbling] checking - has the location moved since the last scrobble?",
            extra={
                "scrobble_id": scrobble.id,
                "scrobble_media_id": scrobble.media_obj.id,
                "media_type": cls.MediaType.GEO_LOCATION,
                "media_id": location.id,
                "has_moved": has_moved,
            },
        )

        if not has_moved:
            logger.info(
                "[scrobbling] finished - has not moved from the old location",
                extra={
                    "scrobble_id": scrobble.id,
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media_id": scrobble.media_obj.id,
                },
            )
            return scrobble

        if existing_locations := location.in_proximity(named=True):
            existing_location = existing_locations.first()
            ts = int(pendulum.now().timestamp())
            scrobble.log[ts] = f"Location {location.id} too close to this scrobble"
            scrobble.save(update_fields=["log"])
            logger.info(
                "[scrobbling] finished - found existing named location",
                extra={
                    "media_id": location.id,
                    "media_type": cls.MediaType.GEO_LOCATION,
                    "old_media_id": existing_location.id,
                },
            )
            return scrobble

        scrobble.stop(force_finish=True)
        scrobble = cls.create(scrobble_data)
        logger.info(
            "[scrobbling] finished - created for location",
            extra={
                "scrobble_id": scrobble.id,
                "media_id": location.id,
                "scrobble_data": scrobble_data,
                "media_type": cls.MediaType.GEO_LOCATION,
                "source": scrobble_data.get("source"),
            },
        )
        return scrobble

    def update(self, scrobble_data: dict) -> "Scrobble":
        # Status is a field we get from Mopidy, which refuses to poll us
        playback_status = scrobble_data.pop("playback_status", None)

        logger.info(
            "[update] called",
            extra={
                "scrobble_id": self.id,
                "scrobble_data": scrobble_data,
                "media_type": self.media_type,
                "playback_status": playback_status,
            },
        )
        if self.beyond_completion_percent:
            playback_status = "stopped"

        if playback_status == "stopped":
            self.stop()
        if playback_status == "paused":
            self.pause()
        if playback_status == "resumed":
            self.resume()

        if playback_status != "resumed":
            scrobble_data["stop_timestamp"] = (
                scrobble_data.pop("timestamp", None) or timezone.now()
            )

        # timestamp should be more-or-less immutable
        scrobble_data.pop("timestamp", None)

        update_fields = []
        for key, value in scrobble_data.items():
            setattr(self, key, value)
            update_fields.append(key)
        self.save(update_fields=update_fields)

        return self

    @classmethod
    def create(
        cls,
        scrobble_data: dict,
    ) -> "Scrobble":
        scrobble = cls.objects.create(
            **scrobble_data,
        )
        profile = UserProfile.objects.filter(
            user_id=scrobble_data["user_id"]
        ).first()
        if profile and profile.ntfy_enabled and profile.ntfy_url:
            # TODO allow priority and tags to be configured in the profile
            notify_str = f"{scrobble.media_obj}"
            if scrobble.log and scrobble.log.get("description"):
                notify_str += f" - {scrobble.log.get('description')}"
            if scrobble.media_obj.subtitle:
                notify_str += f" - {scrobble.media_obj.subtitle}"

            requests.post(
                profile.ntfy_url,
                data=notify_str.encode(encoding="utf-8"),
                headers={
                    "Title": scrobble.media_obj.strings.verb,
                    "Priority": "default",
                    "Tags": scrobble.media_obj.strings.tags,
                },
            )
        return scrobble

    def stop(self, force_finish=False) -> None:
        self.stop_timestamp = timezone.now()
        self.played_to_completion = True
        self.in_progress = False
        if not self.playback_position_seconds:
            self.playback_position_seconds = int(
                (self.stop_timestamp - self.timestamp).total_seconds()
            )
        self.save(
            update_fields=[
                "in_progress",
                "played_to_completion",
                "stop_timestamp",
                "playback_position_seconds",
            ]
        )

        class_name = self.media_obj.__class__.__name__
        if class_name in LONG_PLAY_MEDIA.values():
            self.finish_long_play()

        logger.info(
            "[scrobbling] stopped",
            extra={
                "scrobble_id": self.id,
                "media_id": self.media_obj.id,
                "media_type": self.media_type,
                "source": self.source,
            },
        )

    def pause(self) -> None:
        if self.is_paused:
            logger.warning(f"{self.id} - already paused - {self.source}")
            return
        self.is_paused = True
        self.save(update_fields=["is_paused"])
        logger.info(
            "[scrobbling] paused",
            extra={
                "scrobble_id": self.id,
                "media_type": self.media_type,
                "source": self.source,
            },
        )

    def resume(self) -> None:
        if self.is_paused or not self.in_progress:
            self.is_paused = False
            self.in_progress = True
            self.save(update_fields=["is_paused", "in_progress"])
            logger.info(
                "[scrobbling] resumed",
                extra={
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "source": self.source,
                },
            )

    def cancel(self) -> None:
        self.delete()

    def update_ticks(self, data) -> None:
        self.playback_position_seconds = data.get("playback_position_seconds")
        self.save(update_fields=["playback_position_seconds"])

    def finish_long_play(self):
        seconds_elapsed = (timezone.now() - self.timestamp).seconds
        past_seconds = 0

        # Set our playback seconds, and calc long play seconds
        self.playback_position_seconds = seconds_elapsed
        if self.previous:
            past_seconds = self.previous.long_play_seconds or 0
        self.long_play_seconds = past_seconds + seconds_elapsed

        # Long play scrobbles are always finished when we say they are
        self.played_to_completion = True

        self.save(
            update_fields=[
                "playback_position_seconds",
                "played_to_completion",
                "long_play_seconds",
            ]
        )
        logger.info(
            "[scrobbling] finishing long play",
            extra={
                "scrobble_id": self.id,
            },
        )

    @property
    def beyond_completion_percent(self) -> bool:
        """Returns true if our media is beyond our completion percent, unless
        our type is GeoLocation, in which case we always return false.
        """
        beyond_completion = (
            self.percent_played >= self.media_obj.COMPLETION_PERCENT
        )

        if self.media_type == "GeoLocation":
            logger.info(
                "[scrobbling] locations are ONLY completed when a new one is created",
                extra={
                    "scrobble_id": self.id,
                    "media_type": self.media_type,
                    "beyond_completion": beyond_completion,
                },
            )
            beyond_completion = False

        return beyond_completion
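

# Illustrative sketch, not part of the original module: the pause/resume/stop
# lifecycle on a single scrobble, driven directly through the model methods
# defined above. The function name and the idea of calling this from a task or
# a shell are assumptions for illustration only.
def _example_playback_lifecycle(scrobble: Scrobble) -> None:
    scrobble.pause()   # sets is_paused and logs "[scrobbling] paused"
    scrobble.resume()  # clears is_paused and marks the scrobble in progress again
    scrobble.update_ticks({"playback_position_seconds": 120})
    # stop() sets stop_timestamp and played_to_completion, and finishes
    # long plays for long-play media types
    scrobble.stop()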