
[scrobbles] Add some tests around jellyfin and start cleaning up

Colin Powell 9 months ago
parent
commit
9affd6e03a

+ 62 - 8
tests/scrobbles_tests/conftest.py

@@ -1,8 +1,8 @@
 import json
-import pytest
 
-from rest_framework.authtoken.models import Token
+import pytest
 from django.contrib.auth import get_user_model
+from rest_framework.authtoken.models import Token
 
 User = get_user_model()
 
@@ -13,7 +13,7 @@ class MopidyRequest:
     album = "Sublime"
     track_number = 4
     run_time_ticks = 156604
-    run_time = "156"
+    run_time = 60
     playback_time_ticks = 15045
     musicbrainz_track_id = "54214d63-5adf-4909-87cd-c65c37a6d558"
     musicbrainz_album_id = "03b864cd-7761-314c-a892-05a89ddff00d"
@@ -63,10 +63,9 @@ def valid_auth_token():
     user = User.objects.create(email="test@exmaple.com")
     return Token.objects.create(user=user).key
 
-
 @pytest.fixture
-def mopidy_track_request_data():
-    return MopidyRequest().request_json
+def mopidy_track():
+    return MopidyRequest()
 
 
 @pytest.fixture
@@ -78,6 +77,61 @@ def mopidy_track_diff_album_request_data(**kwargs):
 
 
 @pytest.fixture
-def mopidy_podcast_request_data():
+def mopidy_podcast():
     mopidy_uri = "local:podcast:Up%20First/2022-01-01%20Up%20First.mp3"
-    return MopidyRequest(mopidy_uri=mopidy_uri).request_json
+    return MopidyRequest(mopidy_uri=mopidy_uri)
+
+
+class JellyfinTrackRequest:
+    name = "Emotion"
+    artist = "Carly Rae Jepsen"
+    album = "Emotion"
+    track_number = 1
+    item_type = "Audio"
+    timestamp = "2024-01-14 12:00:19"
+    run_time_ticks = 156604
+    run_time = "00:00:60"
+    playback_time_ticks = 15045
+    musicbrainz_track_id = "54214d63-5adf-4909-87cd-c65c37a6d558"
+    musicbrainz_album_id = "03b864cd-7761-314c-a892-05a89ddff00d"
+    musicbrainz_artist_id = "95f5b748-d370-47fe-85bd-0af2dc450bc0"
+    status = "resumed"
+
+    def __init__(self, **kwargs):
+        self.request_data = {
+            "Name": kwargs.get("name", self.name),
+            "Artist": kwargs.get("artist", self.artist),
+            "Album": kwargs.get("album", self.album),
+            "TrackNumber": int(kwargs.get("track_number", self.track_number)),
+            "RunTime": kwargs.get("run_time", self.run_time),
+            "ItemType": kwargs.get("item_type", self.item_type),
+            "UtcTimestamp": kwargs.get("timestamp", self.timestamp),
+            "PlaybackPositionTicks": int(
+                kwargs.get("playback_time_ticks", self.playback_time_ticks)
+            ),
+            "Provider_musicbrainztrack": kwargs.get(
+                "musicbrainz_track_id", self.musicbrainz_track_id
+            ),
+            "Provider_musicbrainzalbum": kwargs.get(
+                "musicbrainz_album_id", self.musicbrainz_album_id
+            ),
+            "Provider_musicbrainzartist": kwargs.get(
+                "musicbrainz_artist_id", self.musicbrainz_artist_id
+            ),
+            "Status": kwargs.get("status", self.status),
+        }
+
+    def __eq__(self, other):
+        for key, value in self.request_data.items():
+            if value != other.request_data.get(key):
+                return False
+        return True
+
+    @property
+    def request_json(self):
+        return json.dumps(self.request_data)
+
+
+@pytest.fixture
+def jellyfin_track():
+    return JellyfinTrackRequest()
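
For orientation, a quick sketch (not part of the commit) of what the new jellyfin_track fixture serializes with its defaults, assuming the conftest module is importable from the test package:

    import json

    from tests.scrobbles_tests.conftest import JellyfinTrackRequest

    # The class defaults above serialize to a flat Jellyfin webhook payload.
    payload = json.loads(JellyfinTrackRequest().request_json)
    assert payload["Name"] == "Emotion"
    assert payload["ItemType"] == "Audio"
    assert payload["PlaybackPositionTicks"] == 15045
    assert payload["Status"] == "resumed"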

+ 132 - 14
tests/scrobbles_tests/test_views.py

@@ -1,4 +1,9 @@
+from datetime import datetime, timedelta
+from unittest.mock import patch
+
 import pytest
+import time_machine
+from django.utils import timezone
 from django.urls import reverse
 from music.models import Track
 from podcasts.models import PodcastEpisode
@@ -27,29 +32,71 @@ def test_bad_mopidy_request_data(client, valid_auth_token):
 
 @pytest.mark.django_db
 def test_scrobble_mopidy_track(
-    client, mopidy_track_request_data, valid_auth_token
+    client, mopidy_track, valid_auth_token
 ):
     url = reverse("scrobbles:mopidy-webhook")
     headers = {"Authorization": f"Token {valid_auth_token}"}
-    response = client.post(
-        url,
-        mopidy_track_request_data,
-        content_type="application/json",
-        headers=headers,
-    )
-    assert response.status_code == 200
-    assert response.data == {"scrobble_id": 1}
 
-    scrobble = Scrobble.objects.get(id=1)
-    assert scrobble.media_obj.__class__ == Track
-    assert scrobble.media_obj.title == "Same in the End"
+    # Start new scrobble
+    seconds = 1
+    scrobble_id = 1
+    with time_machine.travel(datetime(2024, 1, 14, 12, 0, seconds)):
+        mopidy_track.request_data["playback_time_ticks"] = seconds * 1000
+        response = client.post(
+            url,
+            mopidy_track.request_json,
+            content_type="application/json",
+            headers=headers,
+        )
+        assert response.status_code == 200
+        assert response.data == {"scrobble_id": scrobble_id}
+
+        scrobble = Scrobble.objects.get(id=1)
+        assert scrobble.media_obj.__class__ == Track
+        assert scrobble.media_obj.title == "Same in the End"
+
+    # Continue existing scrobble
+    seconds = 58
+    scrobble_id = 1
+    with time_machine.travel(datetime(2024, 1, 14, 12, 0, seconds)):
+        mopidy_track.request_data["playback_time_ticks"] = seconds * 1000
+        response = client.post(
+            url,
+            mopidy_track.request_json,
+            content_type="application/json",
+            headers=headers,
+        )
+        assert response.status_code == 200
+        assert response.data == {"scrobble_id": scrobble_id}
+
+        scrobble = Scrobble.objects.get(id=1)
+        assert scrobble.media_obj.__class__ == Track
+        assert scrobble.media_obj.title == "Same in the End"
+
+    # Start a new scrobble once the track's 60-second run time has elapsed
+    seconds = 61
+    scrobble_id = 2
+    with time_machine.travel(datetime(2024, 1, 14, 12, 0) + timedelta(seconds=seconds)):
+        mopidy_track.request_data["playback_time_ticks"] = seconds * 1000
+        response = client.post(
+            url,
+            mopidy_track.request_json,
+            content_type="application/json",
+            headers=headers,
+        )
+        assert response.status_code == 200
+        assert response.data == {"scrobble_id": scrobble_id}
+
+        scrobble = Scrobble.objects.get(id=scrobble_id)
+        assert scrobble.media_obj.__class__ == Track
+        assert scrobble.media_obj.title == "Same in the End"
 
 
 @pytest.mark.skip(reason="Allmusic API is unstable")
 @pytest.mark.django_db
 def test_scrobble_mopidy_same_track_different_album(
     client,
-    mopidy_track_request_data,
+    mopidy_track,
     mopidy_track_diff_album_request_data,
     valid_auth_token,
 ):
@@ -57,7 +104,7 @@ def test_scrobble_mopidy_same_track_different_album(
     headers = {"Authorization": f"Token {valid_auth_token}"}
     response = client.post(
         url,
-        mopidy_track_request_data,
+        mopidy_track.request_data,
         content_type="application/json",
         headers=headers,
     )
@@ -98,3 +145,74 @@ def test_scrobble_mopidy_podcast(
     scrobble = Scrobble.objects.get(id=1)
     assert scrobble.media_obj.__class__ == PodcastEpisode
     assert scrobble.media_obj.title == "Up First"
+
+
+@pytest.mark.django_db
+@patch("music.utils.lookup_artist_from_mb", return_value={})
+@patch("music.utils.lookup_album_dict_from_mb", return_value={"year": "1999", "mb_group_id": 1})
+@patch("music.utils.lookup_track_from_mb", return_value={})
+@patch("music.models.lookup_artist_from_tadb", return_value={})
+@patch("music.models.lookup_album_from_tadb", return_value={"year": "1999"})
+@patch("music.models.Album.fetch_artwork", return_value=None)
+@patch("music.models.Album.scrape_allmusic", return_value=None)
+def test_scrobble_jellyfin_track(
+        mock_scrape_allmusic,
+        mock_fetch_artwork,
+        mock_lookup_album_tadb,
+        mock_lookup_artist_tadb,
+        mock_lookup_track,
+        mock_lookup_album,
+        mock_lookup_artist,
+        client,
+        jellyfin_track,
+        valid_auth_token,
+):
+    url = reverse("scrobbles:jellyfin-webhook")
+    headers = {"Authorization": f"Token {valid_auth_token}"}
+
+    with time_machine.travel(datetime(2024, 1, 14, 12, 00, 1)):
+        jellyfin_track.request_data["UtcTimestamp"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
+        response = client.post(
+            url,
+            jellyfin_track.request_json,
+            content_type="application/json",
+            headers=headers,
+        )
+        assert response.status_code == 200
+        assert response.data == {"scrobble_id": 1}
+
+        scrobble = Scrobble.objects.get(id=1)
+        assert scrobble.media_obj.__class__ == Track
+        assert scrobble.media_obj.title == "Emotion"
+
+    with time_machine.travel(datetime(2024, 1, 14, 12, 0, 58)):
+        jellyfin_track.request_data["UtcTimestamp"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
+        response = client.post(
+            url,
+            jellyfin_track.request_json,
+            content_type="application/json",
+            headers=headers,
+        )
+
+        assert response.status_code == 200
+        assert response.data == {"scrobble_id": 1}
+
+        scrobble = Scrobble.objects.get(id=1)
+        assert scrobble.media_obj.__class__ == Track
+        assert scrobble.media_obj.title == "Emotion"
+
+    with time_machine.travel(datetime(2024, 1, 14, 12, 1, 1)):
+        jellyfin_track.request_data["UtcTimestamp"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
+        response = client.post(
+            url,
+            jellyfin_track.request_json,
+            content_type="application/json",
+            headers=headers,
+        )
+
+        assert response.status_code == 200
+        assert response.data == {"scrobble_id": 2}
+
+        scrobble = Scrobble.objects.get(id=2)
+        assert scrobble.media_obj.__class__ == Track
+        assert scrobble.media_obj.title == "Emotion"

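As background for the jellyfin test above and the scrobblers.py change further down, a standalone sketch (not part of the commit) of the position math the webhook handler now relies on: the payload's UtcTimestamp is parsed, pinned to UTC, and subtracted from the current time, so the reported playback position is just the elapsed wall-clock seconds. The helper name here is hypothetical.

    from datetime import datetime, timezone as dt_timezone

    from dateutil.parser import parse


    def elapsed_playback_seconds(utc_timestamp: str, now: datetime) -> int:
        # Mirror the scrobbler: parse the webhook timestamp and treat it as UTC.
        started = parse(utc_timestamp).replace(tzinfo=dt_timezone.utc)
        # Elapsed wall-clock time stands in for the playback position.
        return (now - started).seconds


    # e.g. a progress webhook 45 seconds after the reported start:
    now = datetime(2024, 1, 14, 12, 0, 45, tzinfo=dt_timezone.utc)
    assert elapsed_playback_seconds("2024-01-14 12:00:00", now) == 45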
+ 1 - 3
vrobbler/apps/books/amazon.py

@@ -55,9 +55,7 @@ def scrape_data_from_amazon(url) -> dict:
     r = requests.get(url, headers=headers)
     if r.status_code == 200:
         soup = BeautifulSoup(r.text, "html.parser")
-        import pdb
-
-        pdb.set_trace()
+        # TODO Fix this scraper
         data_dict["rating"] = get_rating_from_soup(soup)
         data_dict["review"] = get_review_from_soup(soup)
     return data_dict

+ 1 - 1
vrobbler/apps/music/utils.py

@@ -90,7 +90,7 @@ def get_or_create_album(
     return album
 
 
-def get_or_create_track(post_data: dict, post_keys: dict):
+def get_or_create_track(post_data: dict, post_keys: dict) -> Track:
     artist_name = post_data.get(post_keys.get("ARTIST_NAME"), "")
     artist_mb_id = post_data.get(post_keys.get("ARTIST_MB_ID"), "")
     album_title = post_data.get(post_keys.get("ALBUM_NAME"), "")

+ 1 - 1
vrobbler/apps/podcasts/utils.py

@@ -54,7 +54,7 @@ def parse_mopidy_uri(uri: str) -> dict:
     }
 
 
-def get_or_create_podcast(post_data: dict):
+def get_or_create_podcast(post_data: dict) -> PodcastEpisode:
     mopidy_uri = post_data.get("mopidy_uri", "")
     parsed_data = parse_mopidy_uri(mopidy_uri)
 

+ 19 - 18
vrobbler/apps/scrobbles/scrobblers.py

@@ -1,8 +1,10 @@
 import json
 import logging
+from datetime import datetime
 from typing import Optional
 
 import pendulum
+import pytz
 from boardgames.models import BoardGame
 from books.models import Book
 from dateutil.parser import parse
@@ -45,15 +47,13 @@ def mopidy_scrobble_media(post_data: dict, user_id: int) -> Scrobble:
     else:
         media_obj = get_or_create_track(post_data, MOPIDY_POST_KEYS)
 
-    # Now we run off a scrobble
-    playback_seconds = post_data.get("playback_time_ticks", 1) / 1000
-    playback_status = post_data.get(MOPIDY_POST_KEYS.get("STATUS"), "")
-
     return media_obj.scrobble_for_user(
         user_id,
         source="Mopidy",
-        playback_position_seconds=playback_seconds,
-        status=playback_status,
+        playback_position_seconds=int(
+            post_data.get("playback_time_ticks", 1) / 1000
+        ),
+        status=post_data.get(MOPIDY_POST_KEYS.get("STATUS"), ""),
     )
 
 
@@ -78,12 +78,6 @@ def jellyfin_scrobble_media(
         and post_data.get("NotificationType") == "PlaybackProgress"
     )
 
-    playback_status = "resumed"
-    if post_data.get("IsPaused"):
-        playback_status = "paused"
-    elif post_data.get("NotificationType") == "PlaybackStop":
-        playback_status = "stopped"
-
     # Jellyfin has some race conditions with it's webhooks, these hacks fix some of them
     if null_position_on_progress:
         logger.info(
@@ -92,10 +86,6 @@ def jellyfin_scrobble_media(
         )
         return
 
-    playback_position_seconds = convert_to_seconds(
-        post_data.get(JELLYFIN_POST_KEYS.get("RUN_TIME"), 0)
-    )
-
     if media_type == Scrobble.MediaType.VIDEO:
         media_obj = Video.find_or_create(post_data)
     else:
@@ -103,9 +93,20 @@ def jellyfin_scrobble_media(
             post_data, post_keys=JELLYFIN_POST_KEYS
         )
 
-    return media_obj.scrobble_for_user_id(
+    timestamp = parse(
+        post_data.get(JELLYFIN_POST_KEYS.get("TIMESTAMP"))
+    ).replace(tzinfo=pytz.utc)
+    playback_status = "resumed"
+    if post_data.get("IsPaused"):
+        playback_status = "paused"
+    elif post_data.get("NotificationType") == "PlaybackStop":
+        playback_status = "stopped"
+
+    # TODO Add some logging here, maybe?
+
+    return media_obj.scrobble_for_user(
         user_id,
-        playback_position_seconds=playback_position_seconds,
+        playback_position_seconds=(timezone.now() - timestamp).seconds,
         status=playback_status,
     )