Better recovery on index failures
parent 17aed24966
commit 48a4b89840
@@ -9,10 +9,15 @@ from planet.spider import filename
 from planet import config
 
 def open():
-    cache = config.cache_directory()
-    index=os.path.join(cache,'index')
-    if not os.path.exists(index): return None
-    return dbhash.open(filename(index, 'id'),'w')
+    try:
+        cache = config.cache_directory()
+        index=os.path.join(cache,'index')
+        if not os.path.exists(index): return None
+        return dbhash.open(filename(index, 'id'),'w')
+    except Exception, e:
+        if e.__class__.__name__ == 'DBError': e = e.args[-1]
+        from planet import logger as log
+        log.error(str(e))
 
 def destroy():
     from planet import logger as log
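
Judging by its imports (planet.spider.filename, planet.config, and the existing dbhash call), this first hunk is in the id-index module: open() now traps any failure, logs a readable message, and falls through to return None, so callers can keep going without an index. bsddb's DBError packs an (errno, message) pair into args, which is why args[-1] recovers the human-readable text. Below is a standalone sketch of that idiom, in Python 2 to match the codebase; DBError and open_or_none here are local stand-ins for illustration, not Planet APIs.

# Standalone sketch of the recovery idiom above (Python 2, matching the
# codebase).  DBError is a local stand-in for bsddb's exception, whose args
# end with a human-readable message; open_or_none mirrors the new behaviour
# of idindex.open(): log the problem and return None instead of raising.
class DBError(Exception):
    pass

def open_or_none(opener):
    try:
        return opener()
    except Exception, e:
        # keep only the readable message from a (errno, message) pair
        if e.__class__.__name__ == 'DBError': e = e.args[-1]
        print 'index unavailable: %s' % str(e)
        return None

def broken_opener():
    raise DBError(2, 'No such file or directory')

assert open_or_none(broken_opener) is None
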
@@ -15,6 +15,8 @@ re_slash = re.compile(r'[?/:|]+')
 re_initial_cruft = re.compile(r'^[,.]*')
 re_final_cruft = re.compile(r'[,.]*$')
 
+index = True
+
 def filename(directory, filename):
     """Return a filename suitable for the cache.
 
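
The remaining hunks are in the spider module, where filename(), spiderFeed(), and spiderPlanet() live. This one adds a module-level index flag whose default, True, means indexing is enabled but the index has not been opened yet. The other values the flag can take are implied by the hunks below; the following comment sketch is reconstructed from the diff, not stated anywhere in the commit itself.

# Inferred states of the module-level index flag:
#   True         -> indexing enabled; spiderFeed() will call idindex.open()
#   a db handle  -> the most recent idindex.open() succeeded
#   None         -> idindex.open() failed or no index exists; indexing is
#                   skipped for the rest of the run, until spiderPlanet()
#                   resets the flag to True
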
@@ -197,7 +199,8 @@ def spiderFeed(feed):
     scrub(feed, data)
 
     from planet import idindex
-    index = idindex.open()
+    global index
+    if index != None: index = idindex.open()
 
     # write each entry to the cache
     cache = config.cache_directory()
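
Because spiderFeed() now only calls idindex.open() while the flag is not None, the first failed open poisons the flag for the remainder of the run and later feeds stop retrying. A minimal simulation of that guard follows; refresh() is a hypothetical helper standing in for the inline "if index != None: index = idindex.open()".

# refresh() is hypothetical; it mimics spiderFeed's guard on the flag.
def refresh(index, opener):
    if index != None:
        index = opener()
    return index

index = True                            # module default / per-run reset
index = refresh(index, lambda: None)    # first open fails -> flag becomes None
index = refresh(index, lambda: 'db')    # not retried; the failure is sticky
assert index is None
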
@@ -290,6 +293,9 @@ def spiderPlanet():
     log = planet.getLogger(config.log_level())
     planet.setTimeout(config.feed_timeout())
 
+    global index
+    index = True
+
     for feed in config.subscriptions():
         try:
             spiderFeed(feed)