After a few retries, the system seems to stop doing anything, as if the task has halted.
Source code:
import re

from pyspider.libs.base_handler import *


class Handler(BaseHandler):
    crawl_config = {
    }

    def __init__(self):
        # Deal is a helper class for persisting results; its definition is not shown here
        self.deal = Deal()

    @every(minutes=24 * 60)
    def on_start(self):
        self.crawl('http://hangkong.citytt.com/hk-1/', callback=self.index_page)

    @config(age=10 * 24 * 60 * 60)
    def index_page(self, response):
        # follow every absolute link in the listing and carry its link text along
        for each in response.doc('.list_centaa a[href^="http"]').items():
            self.crawl(each.attr.href, callback=self.detail_page,
                       connect_timeout=50, timeout=200,
                       save={'name': each.text()})

    @config(priority=2)
    def detail_page(self, response):
        html = response.doc('div.center_rt').html()
        # print(html)
        # extract airline name, IATA code, and official website from the detail block
        pattern = re.compile(u'航空公司:(.*?)<br.*?IATA代码:(.*?)<br.*?官方网站:(.*?)<br', re.S)
        items = re.findall(pattern, html)
        result = ''
        for item in items:
            print(item)
            result += "[%s,%s,%s],\n" % (item[0], item[1], item[2])  # accumulate, not overwrite
        print(result)
        # self.deal.saveBrief(result, DIR_PATH, "aaa")
        return {
            "url": result,
            "name": response.save['name'],
        }
Increasing the timeout values solves it.
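For example, a minimal sketch of the fix: pyspider merges the options in crawl_config into every self.crawl call, so the larger limits can be set once instead of being repeated per request. The numbers below are assumptions to tune for how slow the target site actually is, and retries is pyspider's retry-count option (its default is 3, if memory serves):

    crawl_config = {
        'connect_timeout': 120,  # seconds allowed to establish the connection
        'timeout': 600,          # seconds allowed for the whole response
        'retries': 5,            # give slow pages a few more attempts
    }

The same keys can still be overridden on an individual self.crawl call, as index_page above already does.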