First attempt at blacklisting
This commit is contained in:
parent
5c7edd7f56
commit
552dae3c14
@ -303,7 +303,7 @@ def downloadReadingList(list, orig_config, callback, use_cache=True, re_read=Tru
|
|||||||
|
|
||||||
def http_cache_directory():
    """Return the on-disk location of the HTTP cache.

    Honors the 'http_cache_directory' option from the [Planet] section,
    interpreted relative to the main cache directory; falls back to the
    "cache" subdirectory when the option is absent.
    """
    subdir = "cache"
    if parser.has_option('Planet', 'http_cache_directory'):
        subdir = parser.get('Planet', 'http_cache_directory')
    return os.path.join(cache_directory(), subdir)
||||||
@ -315,9 +315,16 @@ def cache_sources_directory():
|
|||||||
else:
|
else:
|
||||||
return os.path.join(cache_directory(), 'sources')
|
return os.path.join(cache_directory(), 'sources')
|
||||||
|
|
||||||
|
def cache_blacklist_directory():
    """Return the directory holding blacklist marker files.

    The 'cache_blacklist_directory' option from the [Planet] section, if
    present, names a subdirectory of the main cache directory; otherwise
    the 'blacklist' subdirectory is used.
    """
    if not parser.has_option('Planet', 'cache_blacklist_directory'):
        return os.path.join(cache_directory(), 'blacklist')
    return os.path.join(cache_directory(),
                        parser.get('Planet', 'cache_blacklist_directory'))
||||||
|
|
||||||
def cache_lists_directory():
    """Return the directory where reading lists are cached.

    NOTE(review): unlike the sibling *_directory helpers, a configured
    value is returned verbatim rather than joined onto cache_directory()
    -- confirm this asymmetry is intentional before relying on it.
    """
    option = 'cache_lists_directory'
    if parser.has_option('Planet', option):
        return parser.get('Planet', option)
    return os.path.join(cache_directory(), 'lists')
||||||
|
|
||||||
@ -332,7 +339,7 @@ def feed():
|
|||||||
|
|
||||||
def feedtype():
|
def feedtype():
|
||||||
if parser.has_option('Planet', 'feedtype'):
|
if parser.has_option('Planet', 'feedtype'):
|
||||||
parser.get('Planet', 'feedtype')
|
return parser.get('Planet', 'feedtype')
|
||||||
elif feed() and feed().find('atom')>=0:
|
elif feed() and feed().find('atom')>=0:
|
||||||
return 'atom'
|
return 'atom'
|
||||||
elif feed() and feed().find('rss')>=0:
|
elif feed() and feed().find('rss')>=0:
|
||||||
|
@ -69,6 +69,7 @@ def _is_http_uri(uri):
|
|||||||
def writeCache(feed_uri, feed_info, data):
|
def writeCache(feed_uri, feed_info, data):
|
||||||
log = planet.logger
|
log = planet.logger
|
||||||
sources = config.cache_sources_directory()
|
sources = config.cache_sources_directory()
|
||||||
|
blacklist = config.cache_blacklist_directory()
|
||||||
|
|
||||||
# capture http status
|
# capture http status
|
||||||
if not data.has_key("status"):
|
if not data.has_key("status"):
|
||||||
@ -190,6 +191,13 @@ def writeCache(feed_uri, feed_info, data):
|
|||||||
cache = config.cache_directory()
|
cache = config.cache_directory()
|
||||||
for updated, entry in ids.values():
|
for updated, entry in ids.values():
|
||||||
|
|
||||||
|
# compute blacklist file name based on the id
|
||||||
|
blacklist_file = filename(blacklist, entry.id)
|
||||||
|
|
||||||
|
# check if blacklist file exists. If so, skip it.
|
||||||
|
if os.path.exists(blacklist_file):
|
||||||
|
continue
|
||||||
|
|
||||||
# compute cache file name based on the id
|
# compute cache file name based on the id
|
||||||
cache_file = filename(cache, entry.id)
|
cache_file = filename(cache, entry.id)
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user