added TaskOverview

Marvin Zhang
2019-02-23 18:39:22 +08:00
parent c7cf064c21
commit 278d6e72f5
6 changed files with 50 additions and 2 deletions

View File

@@ -98,3 +98,16 @@ class NodeApi(BaseApi):
             'status': 'ok',
             'items': deploys
         })
+
+    def get_tasks(self, id):
+        items = db_manager.list('tasks', {'node_id': id})
+        for item in items:
+            spider_id = item['spider_id']
+            spider = db_manager.get('spiders', id=str(spider_id))
+            item['spider_name'] = spider['name']
+            task = db_manager.get('tasks_celery', id=item['_id'])
+            item['status'] = task['status']
+        return jsonify({
+            'status': 'ok',
+            'items': items
+        })
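For context (not part of this commit): the get_tasks handler above makes two extra lookups per task to attach the spider name and the Celery status. A minimal standalone sketch of that enrichment pattern, written directly against pymongo since db_manager itself is not shown here; the connection URI is an assumption, and the 'UNAVAILABLE' fallback mirrors the SpiderApi variant further down.

# Illustrative sketch only; not part of the commit.
from bson import ObjectId
from pymongo import MongoClient

db = MongoClient('mongodb://localhost:27017')['crawlab']  # assumed connection details

def enrich_tasks(query):
    # Fetch tasks, then join each one to its spider and its Celery record by hand.
    items = list(db['tasks'].find(query))
    for item in items:
        spider = db['spiders'].find_one({'_id': ObjectId(str(item['spider_id']))})
        item['spider_name'] = spider['name'] if spider else None
        celery_task = db['tasks_celery'].find_one({'_id': item['_id']})
        # Fall back when the Celery record is missing, as the SpiderApi variant does.
        item['status'] = celery_task['status'] if celery_task else 'UNAVAILABLE'
    return items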

View File

@@ -78,7 +78,13 @@ class SpiderApi(BaseApi):
         })

     def crawl(self, id):
-        job = execute_spider.delay(id)
+        args = self.parser.parse_args()
+        node_id = args.get('node_id')
+
+        if node_id is None:
+            return {}, 400
+
+        job = execute_spider.delay(id, node_id)
         # print('crawl: %s' % id)
         return {
             'code': 200,
@@ -147,3 +153,19 @@ class SpiderApi(BaseApi):
             'status': 'ok',
             'items': deploys
         })
+
+    def get_tasks(self, id):
+        items = db_manager.list('tasks', {'spider_id': ObjectId(id)})
+        for item in items:
+            spider_id = item['spider_id']
+            spider = db_manager.get('spiders', id=str(spider_id))
+            item['spider_name'] = spider['name']
+            task = db_manager.get('tasks_celery', id=item['_id'])
+            if task is not None:
+                item['status'] = task['status']
+            else:
+                item['status'] = 'UNAVAILABLE'
+        return jsonify({
+            'status': 'ok',
+            'items': items
+        })
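For context (not part of this commit): crawl() now rejects requests that omit node_id, presumably parsed by a Flask-RESTful reqparse parser on the base class. A self-contained sketch of that request-parsing flow; the route path, resource class, and HTTP method are assumptions.

# Illustrative sketch; route, resource name, and method are assumptions.
from flask import Flask
from flask_restful import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

parser = reqparse.RequestParser()
parser.add_argument('node_id', type=str)  # optional here; the handler rejects None itself

class SpiderCrawl(Resource):
    def post(self, id):
        args = parser.parse_args()
        node_id = args.get('node_id')
        if node_id is None:
            return {}, 400
        # In the commit this is where execute_spider.delay(id, node_id) is enqueued.
        return {'code': 200, 'id': id, 'node_id': node_id}

api.add_resource(SpiderCrawl, '/spiders/<string:id>/crawl')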

View File

@@ -12,6 +12,17 @@ class TaskApi(BaseApi):
     )

     def get(self, id=None):
+        if id is not None:
+            task = db_manager.get('tasks', id=id)
+            _task = db_manager.get('tasks_celery', id=task['_id'])
+            _spider = db_manager.get('spiders', id=str(task['spider_id']))
+            task['status'] = _task['status']
+            task['result'] = _task['result']
+            task['spider_name'] = _spider['name']
+            with open(task['log_file_path']) as f:
+                task['log'] = f.read()
+            return jsonify(task)
+
         tasks = db_manager.list('tasks', {}, limit=1000)
         items = []
         for task in tasks:
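For context (not part of this commit): the open() call above raises FileNotFoundError if the log file has not been written yet. A purely illustrative guard for that case:

import os

def read_log(log_file_path):
    # Return the log contents, or an empty string if the file is missing.
    if log_file_path and os.path.exists(log_file_path):
        with open(log_file_path) as f:
            return f.read()
    return ''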

spiders/meitui/app.py Normal file
View File

@@ -0,0 +1 @@
+# /Users/yeqing/projects/crawlab/spiders

View File

View File

@@ -15,7 +15,7 @@ logger = get_logger(__name__)


 @celery_app.task(bind=True)
-def execute_spider(self, id: str):
+def execute_spider(self, id: str, node_id: str):
     task_id = self.request.id
     hostname = self.request.hostname
     spider = db_manager.get('spiders', id=id)
@@ -43,6 +43,7 @@ def execute_spider(self, id: str):
         '_id': task_id,
         'spider_id': ObjectId(id),
         'create_ts': datetime.now(),
+        'node_id': node_id,
         'hostname': hostname,
         'log_file_path': log_file_path,
     })
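For context (not part of this commit): execute_spider now takes two positional arguments, so callers must pass node_id (as the updated SpiderApi.crawl does), and the value is persisted on the task document. A minimal sketch of the new signature; the broker URL and task body are assumptions.

# Illustrative sketch; broker URL and task body are assumptions, not the project's code.
from celery import Celery

celery_app = Celery('sketch', broker='redis://localhost:6379/0')

@celery_app.task(bind=True)
def execute_spider(self, id: str, node_id: str):
    # node_id now travels with the task and is stored alongside the task document.
    return {'task_id': self.request.id, 'spider_id': id, 'node_id': node_id}

# Callers pass both arguments, e.g.:
#     execute_spider.delay(spider_id, node_id)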