ajout du module base.url
This commit is contained in:
parent
4b44a52033
commit
7e4dc7ad8d
|
@ -20,7 +20,7 @@ except ImportError:
|
|||
'procs', 'paths', 'htmlentities', 'dates', 'times', 'words',
|
||||
'tmpfiles', 'lines', 'files', 'config', 'editor', 'pager',
|
||||
'getopt', 'optparse', 'args', 'control',
|
||||
'functions', 'json', 'web',
|
||||
'functions', 'json', 'web', 'url',
|
||||
'flock',
|
||||
'password',
|
||||
)
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
# -*- coding: utf-8 mode: python -*- vim:sw=4:sts=4:et:ai:si:sta:fenc=utf-8
|
||||
import i_need_py23
|
||||
|
||||
"""Des fonction pour télécharger des resources
|
||||
"""
|
||||
|
||||
__all__ = ('Downloader',
|
||||
)
|
||||
|
||||
import os, re, urlparse, urllib, shutil
|
||||
from os import path
|
||||
|
||||
from base import make_prop
|
||||
|
||||
class Downloader(object):
    """Stateful HTTP downloader built on ``urllib.urlretrieve`` (Python 2).

    Relative URLs passed to :meth:`get` / :meth:`reget` are resolved against
    the last fetched URL (the "referer"), or against ``starturl`` before the
    first download.
    """

    # NOTE(review): `make_prop` comes from the project-local `base` module and
    # presumably returns a tuple whose first two items are the private storage
    # attribute and its accessor property — TODO confirm against base.make_prop.

    # Base URL used to resolve the first relative URL, before any download
    # has set a referer.
    _starturl, starturl = make_prop('_starturl', None)[:2]
    # URL of the last successful download; later relative URLs resolve
    # against it (see reget()).
    _referer, referer = make_prop('_referer', None)[:2]

    def __init__(self, starturl=None):
        """Create a downloader, optionally anchored at *starturl*.

        :param starturl: absolute URL used as the join base until the first
            download succeeds; ``None`` leaves the class default in place.
        """
        if starturl is not None: self._starturl = starturl

    # Headers of the last HTTP response. Declared here but never assigned by
    # the methods below as written — presumably for use by subclasses or
    # callers; verify before relying on it.
    _headers, headers = make_prop('_headers', None)[:2]
    def reget(self, url, output=None, content_type=None):
        """Unconditionally download *url* and return the local file path.

        :param url: URL to fetch; may be relative — it is joined against the
            current referer (or ``starturl`` if nothing was fetched yet).
        :param output: destination path. When ``None``, the temporary file
            created by ``urllib.urlretrieve`` is returned as-is.
        :param content_type: when given, the response's ``Content-Type`` must
            equal it or start with it; otherwise the download is discarded
            and ``None`` is returned.
        :return: local path of the downloaded resource, or ``None`` on a
            content-type mismatch.
        """
        # Resolve relative URLs against the referer, falling back to the
        # configured start URL before the first download.
        baseurl = self._referer
        if baseurl is None: baseurl = self._starturl
        if baseurl is not None: url = urlparse.urljoin(baseurl, url)

        # urlretrieve (Python 2 API) downloads to a temp file and returns
        # (local_filename, mimetools.Message headers).
        filename, headers = urllib.urlretrieve(url)
        ct = headers.get('content-type', None)
        if content_type is not None and (ct is None or (ct != content_type and not ct.startswith(content_type))):
            # Wrong content type: discard the temp file and signal failure.
            os.unlink(filename)
            return None

        if output is None:
            # Hand the temp file itself to the caller (cleanup() removes it).
            output = filename
        else:
            # copy2 preserves metadata; the temp file is no longer needed.
            shutil.copy2(filename, output)
            os.unlink(filename)

        # Remember the resolved URL so subsequent relative URLs join cleanly.
        self._referer = url
        return output

    def get(self, url, output=None, content_type=None):
        """Like :meth:`reget`, but skip the download when *output* already
        exists on disk (simple cache behaviour)."""
        if output is not None and path.exists(output):
            return output
        return self.reget(url, output, content_type)

    def cleanup(self):
        """Delete the temporary files left behind by ``urlretrieve``."""
        urllib.urlcleanup()
|
Loading…
Reference in New Issue