diff --git a/src/integrations/imports/yamtrack.py b/src/integrations/imports/yamtrack.py index f1b998fc..42f44bdd 100644 --- a/src/integrations/imports/yamtrack.py +++ b/src/integrations/imports/yamtrack.py @@ -8,10 +8,12 @@ import app from app.models import MediaTypes, Sources +from app import media_type_config from app.providers import services from app.templatetags import app_tags from integrations.imports import helpers -from integrations.imports.helpers import MediaImportError, MediaImportUnexpectedError +from integrations.imports.helpers import (MediaImportError, + MediaImportUnexpectedError) logger = logging.getLogger(__name__) @@ -165,12 +167,70 @@ def _handle_missing_metadata(self, row, media_type, season_number, episode_numbe if row["source"] == Sources.MANUAL.value and row["image"] == "": row["image"] = settings.IMG_NONE else: - metadata = services.get_media_metadata( - media_type, - row["media_id"], - row["source"], - [season_number], - episode_number, + try: + if row["media_id"] is not None and row["media_id"] != "": + metadata = services.get_media_metadata( + media_type, + row["media_id"], + row["source"], + season_number, + episode_number, + ) + row["title"] = metadata["title"] + row["image"] = metadata["image"] + else: + searchquery = row["title"] + metadata = services.search( + media_type, + searchquery, + 1, + media_type_config.get_default_source_name(media_type) + ) + row["title"] = metadata["results"][0]["title"] + logger.info(f"Added title from {media_type_config.get_default_source_name(media_type)}: {row['title']}") + row["source"] = metadata["results"][0]["source"] + row["media_id"] = metadata["results"][0]["media_id"] + logger.info(f"Obtained media id : {row['media_id']}") + + row["media_type"] = media_type + row["image"] = metadata["results"][0]["image"] + except services.ProviderAPIError as e: + self.warnings.append( + f"Failed to fetch metadata for {row['media_id']}: {e!s}", + ) + raise + + def _handle_missing_book_metadata(self, row, 
media_type): """Handle missing metadata by fetching from provider - + Format #isbn,providerid,provider,title,read_start,read_end """ + try: + searchquery = row["title"] + if row["source"] != "": + metadata = services.get_media_metadata( + media_type, + row["media_id"], + row["source"], + ) + row["title"] = metadata["title"] + row["image"] = metadata["image"] + else: + metadata = services.search( + media_type, + searchquery, + 1, + Sources.HARDCOVER.value, + ) + row["title"] = metadata["results"][0]["title"] + logger.info(f"Added title from hardcover: {row['title']}") + row["source"] = Sources.HARDCOVER.value + row["media_id"] = metadata["results"][0]["media_id"] + logger.info(f"Obtained media id hardcover: {row['media_id']}") + + row["media_type"] = media_type + row["image"] = metadata["results"][0]["image"] + except services.ProviderAPIError as e: + self.warnings.append( + f"Failed to fetch metadata for {row['media_id']}: {e!s}", ) - row["title"] = metadata["title"] - row["image"] = metadata["image"] + raise + diff --git a/src/integrations/tests/mock_data/import_yamtrack_partials.csv b/src/integrations/tests/mock_data/import_yamtrack_partials.csv new file mode 100644 index 00000000..bbb94db3 --- /dev/null +++ b/src/integrations/tests/mock_data/import_yamtrack_partials.csv @@ -0,0 +1,5 @@ +"media_id","source","media_type","title","image","season_number","episode_number","score","progress","status","start_date","end_date","notes","progressed_at" +"","","book","Warlock","","","","","","Completed","2024-02-09","2024-03-09","Title Only","" +"429650","hardcover","book","Warlock","","","","","","In progress","","2024-04-09","Media ID","" +"","","book","0312980388","","","","","","Completed","2024-01-09","2024-05-09","ISBN 10 in title","" +"","","movie","Perfect Blue","","","","9.0","1","Completed","","2024-02-09","","2024-02-09T15:30:00Z" diff --git a/src/integrations/tests/test_imports.py b/src/integrations/tests/test_imports.py index 8d3be127..1a3cf5c4 100644 --- 
a/src/integrations/tests/test_imports.py +++ b/src/integrations/tests/test_imports.py @@ -8,28 +8,10 @@ from django.test import TestCase from django_celery_beat.models import CrontabSchedule, PeriodicTask -from app.models import ( - TV, - Anime, - Episode, - Game, - Item, - Manga, - MediaTypes, - Movie, - Season, - Sources, - Status, -) -from integrations.imports import ( - anilist, - helpers, - hltb, - kitsu, - mal, - simkl, - yamtrack, -) +from app.models import (TV, Anime, Book, Episode, Game, Item, Manga, + MediaTypes, Movie, Season, Sources, Status) +from integrations.imports import (anilist, helpers, hltb, kitsu, mal, simkl, + yamtrack) from integrations.imports.trakt import TraktImporter, importer mock_path = Path(__file__).resolve().parent / "mock_data" @@ -819,3 +801,46 @@ def test_create_import_schedule_every_2_days(self): schedule = CrontabSchedule.objects.first() self.assertEqual(schedule.day_of_week, "*/2") +class ImportYamtrackPartials(TestCase): + """Test importing yamtrack media with no ID.""" + + def setUp(self): + """Create user for the tests.""" + self.credentials = {"username": "test", "password": "12345"} + self.user = get_user_model().objects.create_user(**self.credentials) + with Path(mock_path / "import_yamtrack_partials.csv").open("rb") as file: + self.import_results = yamtrack.importer(file, self.user, "new") + + def test_import_counts(self): + """Test basic counts of imported media.""" + self.assertEqual(Book.objects.filter(user=self.user).count(), 3) + self.assertEqual(Movie.objects.filter(user=self.user).count(), 1) + + + def test_historical_records(self): + """Test historical records creation during import.""" + book = Book.objects.filter(user=self.user).first() + self.assertEqual(book.history.count(), 1) + #self.assertEqual( + # book.history.first().history_date, + # datetime(2005, 4, 1, 0, 0, 0, tzinfo=UTC), + #) + bookqs = Book.objects.filter( + user=self.user, + item__title="Warlock", + ).order_by("-end_date") + books = list(bookqs) 
+ + self.assertEqual(len(books), 3) + self.assertEqual( + books[0].end_date, + datetime(2024, 5, 9, 0, 0, 0, tzinfo=UTC), + ) + self.assertEqual( + books[1].end_date, + datetime(2024, 4, 9, 0, 0, 0, tzinfo=UTC), + ) + self.assertEqual( + books[2].end_date, + datetime(2024, 3, 9, 0, 0, 0, tzinfo=UTC), + )