sbtools/downloader.py

#!/usr/bin/env python3
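"""Download photos linked from the PlanetRomeo message window.

Copied photo URLs are normalized to direct http://www.planetromeo.com/img/usr/
image URLs and then downloaded, optionally into a subdirectory named after the
sender (-s), or only printed (-g).
"""
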
import argparse, os, requests, sys


def fileDownloader(url, sender=False):
    """Download url into the current directory, or into a subdirectory named after sender."""
    if sender:
        os.makedirs(sender, exist_ok=True)
        filename = "%s/%s" % (sender, url.split('/')[-1])
    else:
        filename = url.split('/')[-1]
    if not os.path.isfile(filename):
        # Stream the download in 1 KiB chunks so large files are not held in memory.
        remote = requests.get(url, stream=True)
        with open(filename, 'wb') as outfile:
            for chunk in remote.iter_content(chunk_size=1024):
                if chunk:
                    outfile.write(chunk)
                    outfile.flush()
    else:
        print("File %s already exists. Skipping." % filename, file=sys.stderr)


def urlparse(url):
    """Turn a URL copied from the message window into a direct image URL."""
    if "/img/usr/" in url:
        # Already a direct image URL.
        return url
    if "/auswertung/pix/popup.php/" in url:
        # The 30-character image name is embedded in the popup URL path.
        return "http://www.planetromeo.com/img/usr/%s" % url[52:82]
    else:
        raise ValueError("%s is not a valid URL." % url)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("url", type=str, nargs="+", help="photo URL as copied from message window")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-s", "--sender", type=str, help="name of the sender, puts images into subdirectory")
    group.add_argument("-g", "--get-urls", help="get URLs only, don't download", action="store_true")
    args = parser.parse_args()
    for url in args.url:
        try:
            purl = urlparse(url)
        except ValueError:
            print("%s is not a valid URL. Skipping." % url, file=sys.stderr)
        else:
            if args.get_urls:
                print(purl)
            elif args.sender:
                fileDownloader(purl, args.sender)
            else:
                fileDownloader(purl)
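
# Example invocations (a sketch; the sender name and URLs below are hypothetical
# placeholders, not real values):
#
#   ./downloader.py -g "http://www.planetromeo.com/auswertung/pix/popup.php/<...>"
#   ./downloader.py -s somesender "http://www.planetromeo.com/img/usr/<image-id>"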