From d0a9c5bc4d208fe9a420778d5117b4b938a090a6 Mon Sep 17 00:00:00 2001
From: "Michael K. Steinberg"
Date: Mon, 9 Jan 2023 19:41:15 +0200
Subject: [PATCH] Implement dynamic settings changing & support for external
 disks

---
 config.py                | 12 ++-------
 settings.py              |  9 +++++++
 spotify_mass_download.py | 57 +++++++++++++++++++++-------------------
 spotify_scraper.py       |  2 +-
 spotify_utils.py         | 12 ++++++---
 templates/index.html     | 19 ++++++++++++++
 webgui.py                | 14 +++++++---
 7 files changed, 79 insertions(+), 46 deletions(-)
 create mode 100644 settings.py

diff --git a/config.py b/config.py
index 0bb6461..7ada536 100644
--- a/config.py
+++ b/config.py
@@ -12,6 +12,7 @@ import shutil
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
 from pydeezer.constants import track_formats
+from settings import Settings
 
 USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36'
 SP_DC = os.getenv('SP_DC')
@@ -20,16 +21,7 @@ SP_KEY = os.getenv('SP_KEY')
 PROXY = {}
 VERIFY_SSL = True
 
-
-FULL_DOWNLOAD_RECURISVE_LIMIT = 0x4000
-FULL_DOWNLOAD_THREAD_LIMIT = 50
-VERBOSE_OUTPUTS = False
-AUTO_DOWNLOAD_PLAYLIST_METADATA = True
-
-DEFAULT_DOWNLOAD_DIRECTORY = 'music'
-ARTIST_IMAGES_SUB_DIR = '_Artists'
-PLAYLIST_METADATA_SUB_DIR = '_Playlists'
-GLOBALS_SAVE_FILE = '_downloaded_store.json'
+settings = Settings()
 
 def clean_file_path(prompt: str):
     return prompt.replace('?', '').replace('"', '').replace('*', '').replace('|', '').replace('\\', '').replace(':', '').replace('>', '').replace('<', '')
diff --git a/settings.py b/settings.py
new file mode 100644
index 0000000..abf5f9e
--- /dev/null
+++ b/settings.py
@@ -0,0 +1,9 @@
+class Settings:
+    DEFAULT_DOWNLOAD_DIRECTORY = 'music'
+    ARTIST_IMAGES_SUB_DIR = '_Artists'
+    PLAYLIST_METADATA_SUB_DIR = '_Playlists'
+    GLOBALS_SAVE_FILE = '_downloaded_store.json'
+    FULL_DOWNLOAD_RECURISVE_LIMIT = 0x4000
+    FULL_DOWNLOAD_THREAD_LIMIT = 50
+    VERBOSE_OUTPUTS = False
+    AUTO_DOWNLOAD_PLAYLIST_METADATA = True
diff --git a/spotify_mass_download.py b/spotify_mass_download.py
index 2e51051..ccee16e 100644
--- a/spotify_mass_download.py
+++ b/spotify_mass_download.py
@@ -50,7 +50,7 @@ console = Console()
 def download_track_list(download_dir: str, track_list: list, recursive_artist: bool=False, recursive_album: bool=False, recursive: bool=False, recursive_limit=1024):
     global g_downloaded_songs, g_downloaded_artist_covers
     my_thread_id = str(get_ident()).zfill(6)
-    artist_images_download_dir = f'{download_dir}/{ARTIST_IMAGES_SUB_DIR}'
+    artist_images_download_dir = f'{download_dir}/{settings.ARTIST_IMAGES_SUB_DIR}'
     downloaded_count = 0
     for track in track_list:
         try:
@@ -61,9 +61,9 @@ def download_track_list(download_dir: str, track_list: list, recursive_artist: b
                 downloaded_count += 1
                 continue
             g_downloaded_songs.append(track.spotify_id)
-            track_path = f'{download_dir}/{track.artists[0].name}/{track.album.title}/{", ".join([x.name for x in track.artists])} - {track.title} [{track.album.title}].mp3'
+            track_path = f'{download_dir}/{track.artists[0].name}/{track.album.title}'
             track.download_to_file(scraper, track_path)
-            console.happy(f'Thread<{my_thread_id}> | Downloaded: {track_path}')
+            console.happy(f'Thread<{my_thread_id}> | Downloaded: {track.preview_title()}')
             if (recursive_album or recursive) and len(track_list) < recursive_limit:
                 new_tracks = scraper.scrape_album_tracks(track.album.spotify_id)
                 for new_track in new_tracks:
@@ -93,14 +93,14 @@ def download_track_list(download_dir: str, track_list: list, recursive_artist: b
         except Exception as ex:
             console.error(f'Thread<{my_thread_id}> | Exception: {ex}')
         downloaded_count += 1
-        if VERBOSE_OUTPUTS:
+        if settings.VERBOSE_OUTPUTS:
             console.log(f'Thread<{my_thread_id}> | Processed {downloaded_count} / {len(track_list)}')
 
 
 def save_globals_save_file():
     global g_keep_saving, g_downloaded_artist_covers, g_downloaded_songs
     try:
-        with open(GLOBALS_SAVE_FILE, 'r') as f:
+        with open(settings.GLOBALS_SAVE_FILE, 'r') as f:
             data = json.loads(f.read())
             g_downloaded_songs = json.loads(data['songs'])
             g_downloaded_artist_covers = json.loads(data['artists'])
@@ -108,47 +108,50 @@ def save_globals_save_file():
     except Exception as ex:
         console.error(f'Failed to load globals save file! Exception: {ex}')
     while g_keep_saving > 0:
-        with open(GLOBALS_SAVE_FILE, 'w') as f:
+        with open(settings.GLOBALS_SAVE_FILE, 'w') as f:
             g_downloaded_songs_json = json.dumps(g_downloaded_songs)
             g_downloaded_artist_covers_json = json.dumps(g_downloaded_artist_covers)
             data = {'songs':g_downloaded_songs_json, 'artists': g_downloaded_artist_covers_json }
             f.write( json.dumps(data) )
-        if VERBOSE_OUTPUTS:
+        if settings.VERBOSE_OUTPUTS:
             console.log('Saved globals file!')
         sleep(15)
 
 
 def full_download(download_dir: str, identifier: str, recursive_artist: bool=False, recursive_album: bool=False, recursive: bool=False, recursive_limit:int=1024, thread_count:int=5):
     global g_downloaded_songs, g_downloaded_artist_covers, g_keep_saving
-    artist_images_download_dir = f'{download_dir}/{ARTIST_IMAGES_SUB_DIR}'
-    os.makedirs(artist_images_download_dir, exist_ok=True)
-    os.makedirs(f'temp', exist_ok=True)
+    try:
+        artist_images_download_dir = f'{download_dir}/{settings.ARTIST_IMAGES_SUB_DIR}'
+        os.makedirs(artist_images_download_dir, exist_ok=True)
+        os.makedirs(f'temp', exist_ok=True)
 
-    g_keep_saving += 1
+        g_keep_saving += 1
 
-    client.refresh_tokens()
-    console.log(f'Recieved scrape command on identifier: {identifier}, {recursive=}, {recursive_artist=}, {recursive_album=}, {recursive_limit=}, {thread_count=}')
-    track_list = scraper.scrape_tracks(identifier, console=console)
-    console.log(f'Scraping on identifier: {identifier} yielded {len(track_list)} tracks!')
-    download_threads = []
-    thread_subsection_size = int(len(track_list) / thread_count)
-    for i in range(thread_count - 1):
-        download_threads.append(Thread(target=download_track_list, args=(download_dir, track_list[thread_subsection_size * i : (thread_subsection_size * i) + thread_subsection_size], recursive_artist, recursive_album, recursive, recursive_limit)))
+        client.refresh_tokens()
+        console.log(f'Received scrape command on identifier: {identifier}, {recursive=}, {recursive_artist=}, {recursive_album=}, {recursive_limit=}, {thread_count=}')
+        track_list = scraper.scrape_tracks(identifier, console=console)
+        console.log(f'Scraping on identifier: {identifier} yielded {len(track_list)} tracks!')
+        download_threads = []
+        thread_subsection_size = int(len(track_list) / thread_count)
+        for i in range(thread_count - 1):
+            download_threads.append(Thread(target=download_track_list, args=(download_dir, track_list[thread_subsection_size * i : (thread_subsection_size * i) + thread_subsection_size], recursive_artist, recursive_album, recursive, recursive_limit)))
+            download_threads[-1].start()
+            sleep(0.05)
+        download_threads.append(Thread(target=download_track_list, args=(download_dir, track_list[thread_subsection_size * (thread_count - 1):], recursive_artist, recursive_album, recursive, recursive_limit)))
         download_threads[-1].start()
-        sleep(0.05)
-    download_threads.append(Thread(target=download_track_list, args=(download_dir, track_list[thread_subsection_size * (thread_count - 1):], recursive_artist, recursive_album, recursive, recursive_limit)))
-    download_threads[-1].start()
-    [x.join() for x in download_threads]
+        [x.join() for x in download_threads]
 
-    console.log(f'Comletely done scraping identifier: {identifier}!')
+        console.log(f'Completely done scraping identifier: {identifier}!')
 
-    g_keep_saving -= 1
+        g_keep_saving -= 1
+    except Exception as ex:
+        console.error(f'Full download exception: {ex}')
 
 
 def download_all_categories_playlists(download_meta_data_only=True):
     client.refresh_tokens()
-    os.makedirs(f'{DEFAULT_DOWNLOAD_DIRECTORY}/{PLAYLIST_METADATA_SUB_DIR}/', exist_ok=True)
+    os.makedirs(f'{settings.DEFAULT_DOWNLOAD_DIRECTORY}/{settings.PLAYLIST_METADATA_SUB_DIR}/', exist_ok=True)
     console.log(f'Scraping playlists from all categories')
     category_ids = scraper.get_categories_ids()
     random.shuffle(category_ids)
@@ -162,7 +165,7 @@ def download_all_categories_playlists(download_meta_data_only=True):
                 playlist = scraper.get_playlist(playlist_id)
                 playlist.export_to_file()
                 if not download_meta_data_only:
-                    full_download(f'{DEFAULT_DOWNLOAD_DIRECTORY}', identifier=playlist.href)
+                    full_download(f'{settings.DEFAULT_DOWNLOAD_DIRECTORY}', identifier=playlist.href)
             except Exception as ex:
                 console.error(f'Scraping categories exception: {ex}')
     except Exception as ex:
diff --git a/spotify_scraper.py b/spotify_scraper.py
index fe4669d..2e18747 100644
--- a/spotify_scraper.py
+++ b/spotify_scraper.py
@@ -61,7 +61,7 @@ class SpotifyScraper:
         if len(tracks) != int(playlist_data['total']):
             print(f'Warning: track count does not match! {len(tracks)} != {int(playlist_data["tracks"]["total"])}')
         spotify_tracks = [SpotifyTrack(track_data) for track_data in tracks]
-        if AUTO_DOWNLOAD_PLAYLIST_METADATA:
+        if settings.AUTO_DOWNLOAD_PLAYLIST_METADATA:
             playlist = SpotifyPlaylist(playlist_id, spotify_tracks, self.get_playlist_data(playlist_id))
             playlist.export_to_file()
         return spotify_tracks
diff --git a/spotify_utils.py b/spotify_utils.py
index 0d1b6e8..a1d55d1 100644
--- a/spotify_utils.py
+++ b/spotify_utils.py
@@ -167,6 +167,9 @@ class SpotifyTrack:
         self.thumbnail = self.download_thumbnail(scraper)
         self.lyrics = self.get_lyrics(scraper)
 
+    def preview_title(self):
+        return f'{", ".join([x.name for x in self.artists])} - {self.title} [{self.album.title}]'
+
     def download_to_file(self, scraper, output_path: str):
         temp_file_path = f'temp/{hashlib.sha1(self.title.encode() + self.album.spotify_id.encode()).hexdigest()}.temp.mp3'
         self.package_download(scraper)
@@ -188,9 +191,9 @@ class SpotifyTrack:
 
         audio_file.tag.save()
 
-        output_path = clean_file_path(output_path)
-        os.makedirs(os.path.dirname(output_path), exist_ok=True)
-        shutil.move(temp_file_path, output_path)
+        full_output_path = output_path + '/' + clean_file_path(self.preview_title()) + '.mp3'
+        os.makedirs(os.path.dirname(full_output_path), exist_ok=True)
+        shutil.move(temp_file_path, full_output_path)
 
 
 class SpotifyPlaylist:
@@ -222,7 +225,8 @@ class SpotifyPlaylist:
         return json.dumps(data)
 
     def export_to_file(self) -> None:
-        with open(f'{DEFAULT_DOWNLOAD_DIRECTORY}/{PLAYLIST_METADATA_SUB_DIR}/{self.spotify_id}.playlist', 'w') as f:
+        os.makedirs(f'{settings.DEFAULT_DOWNLOAD_DIRECTORY}/{settings.PLAYLIST_METADATA_SUB_DIR}/', exist_ok=True)
+        with open(f'{settings.DEFAULT_DOWNLOAD_DIRECTORY}/{settings.PLAYLIST_METADATA_SUB_DIR}/{self.spotify_id}.playlist', 'w') as f:
             f.write(self.export())
 
     @property
diff --git a/templates/index.html b/templates/index.html
index 07828ab..fabeb33 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -29,6 +29,16 @@
             document.getElementById('console-output').scrollTop = 999999999999999999999999;
         }
     }, 1000);
+
+    let settings_visible = true;
+    function toggle_settings_visibility() {
+        if (settings_visible) {
+            document.getElementById('settings-container').setAttribute('hidden', '')
+        } else {
+            document.getElementById('settings-container').removeAttribute('hidden')
+        };
+        settings_visible = !settings_visible;
+    };
@@ -45,6 +55,15 @@
+
+
+
+
+
+
+
+
+
diff --git a/webgui.py b/webgui.py
index 00e8cc5..f4eb5b7 100644
--- a/webgui.py
+++ b/webgui.py
@@ -9,7 +9,7 @@ app.config['TEMPLATES_AUTO_RELOAD'] = True
 
 @app.route('/')
 def index():
-    return render_template('index.html')
+    return render_template('index.html', settings=settings)
 
 
 @app.route('/actions/download/', methods=['POST'])
@@ -19,12 +19,12 @@ def actions_download():
         recursive = request.form.get('recursive') or False
         recursive_artist = request.form.get('recursive-artist') or False
         recursive_album = request.form.get('recursive-album') or False
-        recursive_limit = min(int(request.form.get('recursive-limit')) or 1024, FULL_DOWNLOAD_RECURISVE_LIMIT)
-        thread_count = min(int(request.form.get('thread-count')) or 5, FULL_DOWNLOAD_THREAD_LIMIT)
+        recursive_limit = min(int(request.form.get('recursive-limit')) or 1024, settings.FULL_DOWNLOAD_RECURISVE_LIMIT)
+        thread_count = min(int(request.form.get('thread-count')) or 5, settings.FULL_DOWNLOAD_THREAD_LIMIT)
         recursive = True if recursive == 'on' else False
         recursive_album = True if recursive_album == 'on' else False
         recursive_artist = True if recursive_artist == 'on' else False
-        full_download(DEFAULT_DOWNLOAD_DIRECTORY, spotify_url, recursive=recursive, recursive_artist=recursive_artist, recursive_album=recursive_album, recursive_limit=recursive_limit, thread_count=thread_count)
+        full_download(settings.DEFAULT_DOWNLOAD_DIRECTORY, spotify_url, recursive=recursive, recursive_artist=recursive_artist, recursive_album=recursive_album, recursive_limit=recursive_limit, thread_count=thread_count)
         return 'success'
     except Exception as ex:
         return str(ex)
@@ -45,5 +45,11 @@ def info_console():
     return jsonify( {'logs': logs[offset:], 'offset': len(logs)} )
 
 
+@app.route('/settings/', methods=['POST'])
+def change_settings():
+    settings.DEFAULT_DOWNLOAD_DIRECTORY = request.form.get('download-dir')
+    return 'success'
+
+
 if __name__ == '__main__':
     app.run(debug=True)
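
A quick way to exercise the new dynamic-settings route by hand is a small client-side sketch like the one below. The '/settings/' route, the 'download-dir' form field, and the 'success' response body come from the patch above; the base URL assumes Flask's default development address, and the helper name and example path are purely illustrative.

# Minimal sketch (not part of the patch): assumes webgui.py is running on
# Flask's default dev server at http://127.0.0.1:5000. The '/settings/' route
# and 'download-dir' field are taken from the patch; the helper name and the
# example path below are hypothetical.
import requests

BASE_URL = 'http://127.0.0.1:5000'

def set_download_directory(path: str) -> bool:
    # POST the new download directory, e.g. a mount point on an external disk.
    response = requests.post(f'{BASE_URL}/settings/', data={'download-dir': path})
    return response.text == 'success'

if __name__ == '__main__':
    print(set_download_directory('D:/Music'))  # hypothetical external-disk path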