Prevent files from being downloaded over and over again
parent 0db066c9b9
commit b3a609fa88
1 changed file with 9 additions and 8 deletions
cah.py | 17
--- a/cah.py
+++ b/cah.py
@@ -5,16 +5,17 @@ __maintainer__ = __author__
 __email__ = "info@klaus-uwe.me"
 __version__ = "0.1"
 
-import requests, bs4, PIL.Image, io
+import requests, bs4, PIL.Image, io, os
 
 
 def fileDownloader(url):
     filename = url.split('/')[-1]
-    remote = requests.get(url, stream=True)
-    with open(filename, 'wb') as outfile:
-        for chunk in remote.iter_content(chunk_size=1024):
-            if chunk:
-                outfile.write(chunk)
-                outfile.flush()
+    if not os.path.isfile(filename):
+        remote = requests.get(url, stream=True)
+        with open(filename, 'wb') as outfile:
+            for chunk in remote.iter_content(chunk_size=1024):
+                if chunk:
+                    outfile.write(chunk)
+                    outfile.flush()
 
 
 def siteHandler(p = 15):
     suppe = bs4.BeautifulSoup(requests.get('http://explosm.net/comics/' + str(p)).text)
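For reference, the download helper after this change looks roughly like the standalone sketch below. The os.path.isfile guard is the new part that skips files already saved to disk; the __main__ block and its example URL are hypothetical and only illustrate how fileDownloader would be called.

import os
import requests


def fileDownloader(url):
    # Use the last path segment of the URL as the local file name.
    filename = url.split('/')[-1]
    # New behaviour: skip the download entirely if the file already exists,
    # so repeated runs do not fetch the same file over and over again.
    if not os.path.isfile(filename):
        remote = requests.get(url, stream=True)
        with open(filename, 'wb') as outfile:
            for chunk in remote.iter_content(chunk_size=1024):
                if chunk:
                    outfile.write(chunk)
                    outfile.flush()


if __name__ == '__main__':
    # Hypothetical usage; the URL is an example, not taken from the commit.
    fileDownloader('http://example.com/comic.png')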