From 187e7927ba0b57e449257aa914ad05c5b977d67c Mon Sep 17 00:00:00 2001
From: Jackson Taylor
Date: Sun, 7 Aug 2022 09:32:37 -0400
Subject: Save retry file until retryFile flag is used

This way if someone were to have multiple playlists that they wanted to
download, all of the failed urls would get dumped to 1 file. Then they
could retry them all at once.
---
 jamos | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/jamos b/jamos
index 0783e70..c068b5c 100755
--- a/jamos
+++ b/jamos
@@ -208,9 +208,9 @@ def create_downloader(music_directory, cookies):
 
 def save_urls_from_playlist_to_file(filename, urls):
     try:
-        f = open(filename, "w")
+        f = open(filename, "a")
         for url in urls:
-            f.writelines(url)
+            f.writelines(url + '\n')
         f.close()
     except Exception as e:
         print(e)
@@ -262,6 +262,14 @@ if __name__ == "__main__":
             save_urls_from_playlist_to_file(
                 os.path.join(music_directory, "jamos_failed_urls.txt"),
                 failed_urls)
+        elif args.retryFile:
+            # Just because we don't have any failed urls in this run, doesn't
+            # mean that we can get rid of the retry file. We'll only remove it
+            # if it's been explicitly tried and we have no failed urls.
+
+            # We've successfully downloaded all of the previously failed urls.
+            # Delete the file
+            os.remove(args.retryFile)
     except Exception as ex:
         print(ex)
         print("Saving failed urls to file failed! Printing failed urls:")
--
cgit v1.2.3
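
Note: the following is a minimal, self-contained Python sketch of the retry-file
flow this patch describes, not the actual jamos code. The download_url stub, the
argparse wiring, and the playlist handling are hypothetical stand-ins; only
save_urls_from_playlist_to_file, the retryFile flag, the append-mode write, and
the removal of the retry file after a fully successful retry mirror the patch.

#!/usr/bin/env python3
# Sketch of the retry-file behavior described in the commit above.
import argparse
import os


def save_urls_from_playlist_to_file(filename, urls):
    # Append so that failed urls from several playlist runs collect in one file.
    try:
        with open(filename, "a") as f:
            for url in urls:
                f.write(url + "\n")
    except Exception as e:
        print(e)


def download_url(url):
    # Hypothetical stand-in for the real downloader; returns True on success.
    return True


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--retryFile",
                        help="file of previously failed urls to retry")
    parser.add_argument("urls", nargs="*")
    args = parser.parse_args()

    music_directory = "."  # stand-in for the real music directory

    if args.retryFile:
        # Retry every url that earlier runs dumped into the retry file.
        with open(args.retryFile) as f:
            urls = [line.strip() for line in f if line.strip()]
    else:
        urls = args.urls

    failed_urls = [url for url in urls if not download_url(url)]

    if failed_urls:
        # Keep accumulating failures across runs; never truncate the file here.
        save_urls_from_playlist_to_file(
            os.path.join(music_directory, "jamos_failed_urls.txt"),
            failed_urls)
    elif args.retryFile:
        # Every previously failed url succeeded on an explicit retry,
        # so the retry file can finally be removed.
        os.remove(args.retryFile)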