Fix scraper crash on local files

dev
Michael Kuperfish Steinberg 2023-01-20 15:17:11 +02:00
parent d4d4c48150
commit fe43b16edf
3 changed files with 12 additions and 3 deletions

View file

@@ -39,7 +39,7 @@ class SpotifyScraper:
     def extract_id_from_link(self, link: str) -> str:
         return link[link.rindex('/') + 1:]
 
-    def scrape_tracks(self, link: str, console=None) -> list:
+    def scrape_tracks(self, link: str, console=None) -> list[SpotifyTrack]:
         id_type = self.identify_link_type(link)
         if id_type == self.IDTypes.Playlist:
             return self.scrape_playlist_tracks(self.extract_id_from_link(link))

View file

@@ -14,8 +14,11 @@ class SpotifyAlbum:
         self.load_from_data(album_data)
 
     def load_from_data(self, data):
+        if not data['album_type']:
+            return
         self.title = data['name']
-        self.thumbnail_href = data['images'][0]['url']
+        if len(data['images']) > 0:
+            self.thumbnail_href = data['images'][0]['url']
         self.track_count = data['total_tracks']
         try:
             self.release_date = time.mktime(datetime.datetime.strptime(data['release_date'], "%Y-%m-%d").timetuple())
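With these guards, album parsing becomes a no-op for playlist entries that are local files instead of failing on the stub album payload (previously an empty 'images' list raised IndexError). A minimal sketch of the intended behaviour, assuming the constructor simply forwards the raw payload to load_from_data and that local files arrive with a falsy album_type and an empty images list; the payload below is illustrative, not a verbatim Spotify API response:

    # Illustrative local-file album payload (assumed shape, matching the new guards).
    local_album_data = {
        'album_type': None,     # falsy -> load_from_data now returns early
        'name': 'Unknown Album',
        'images': [],           # no artwork, so thumbnail_href is never read
        'total_tracks': 1,
        'release_date': '1970-01-01',
    }

    album = SpotifyAlbum(local_album_data)   # no longer crashes; the album is simply left unpopulated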

View file

@@ -48,7 +48,9 @@ class SpotifyTrack:
         self.explicit = data['explicit']
         self.href = data['href']
         self.popularity = data['popularity']
-        self.isrc = data['external_ids']['isrc']
+        if 'isrc' in data['external_ids']:
+            # isrc is not available for local files
+            self.isrc = data['external_ids']['isrc']
 
     def __str__(self) -> str:
         return f'SpotifyTrack< {self.title} >'
@@ -65,9 +67,13 @@ class SpotifyTrack:
         return scraper.get(self.thumbnail_href).content
 
     def get_download_link(self, scraper) -> str:
+        if not self.isrc:
+            return ''
         return Deezer.get_track_download_url(Deezer.get_track_data(Deezer.get_track_id_from_isrc(self.isrc)))[0]
 
     def download(self, scraper) -> bytes:
+        if not self.isrc:
+            raise SpotifyTrackException(f'Cannot download local file {self.title}!')
         try:
             download_link = self.get_download_link(scraper)
             data = Deezer.decrypt_download_data(requests.get(download_link, headers={'Accept':'*/*'}), self.isrc)
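Taken together, the SpotifyTrack changes turn local files from a hard crash into a skippable case: the ISRC is only read when Spotify provides one, the Deezer lookup is short-circuited, and download raises an explicit SpotifyTrackException. A rough sketch of the resulting behaviour, assuming self.isrc defaults to a falsy value when the 'isrc' key is absent and that 'track' was built from a local-file playlist entry (both assumptions, not shown in this diff):

    # 'track' is assumed to be a SpotifyTrack parsed from a local-file entry,
    # i.e. data['external_ids'] had no 'isrc' key, so track.isrc stayed falsy.
    assert track.get_download_link(scraper) == ''   # Deezer lookup is skipped entirely

    try:
        track.download(scraper)
    except SpotifyTrackException as exc:
        print(exc)   # Cannot download local file <title>!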