diff --git a/crawlab/db/manager.py b/crawlab/db/manager.py index c1c6d88a..71ea9b2b 100644 --- a/crawlab/db/manager.py +++ b/crawlab/db/manager.py @@ -28,7 +28,7 @@ class DbManager(object): if item.get('stats') is not None: item.pop('stats') - col.save(item, **kwargs) + return col.save(item, **kwargs) def remove(self, col_name: str, cond: dict, **kwargs) -> None: """ diff --git a/crawlab/routes/spiders.py b/crawlab/routes/spiders.py index ba315ce9..157218ee 100644 --- a/crawlab/routes/spiders.py +++ b/crawlab/routes/spiders.py @@ -21,7 +21,7 @@ from tasks.spider import execute_spider from utils import jsonify from utils.deploy import zip_file, unzip_file from utils.file import get_file_suffix_stats, get_file_suffix -from utils.spider import get_lang_by_stats +from utils.spider import get_lang_by_stats, get_last_n_run_errors_count, get_last_n_day_tasks_count parser = reqparse.RequestParser() parser.add_argument('file', type=FileStorage, location='files') @@ -106,7 +106,7 @@ class SpiderApi(BaseApi): if spider is None: stats = get_file_suffix_stats(dir_path) lang = get_lang_by_stats(stats) - db_manager.save('spiders', { + spider = db_manager.save('spiders', { 'name': dir_name, 'src': dir_path, 'lang': lang, @@ -137,6 +137,13 @@ class SpiderApi(BaseApi): 'suffix_stats': stats, }) + # --------- + # stats + # --------- + # last 5-run errors + spider['last_5_errors'] = get_last_n_run_errors_count(spider_id=spider['_id'], n=5) + spider['last_7d_tasks'] = get_last_n_day_tasks_count(spider_id=spider['_id'], n=7) + # append spider items.append(spider) diff --git a/crawlab/tasks/spider.py b/crawlab/tasks/spider.py index ad47b655..0d843e22 100644 --- a/crawlab/tasks/spider.py +++ b/crawlab/tasks/spider.py @@ -3,7 +3,7 @@ from datetime import datetime from time import sleep from bson import ObjectId -from pymongo import ASCENDING +from pymongo import ASCENDING, DESCENDING from config import PROJECT_DEPLOY_FILE_FOLDER, PROJECT_LOGS_FOLDER, PYTHON_ENV_PATH from constants.task
import TaskStatus diff --git a/crawlab/utils/spider.py b/crawlab/utils/spider.py index 0a45d28f..6f7d4ef6 100644 --- a/crawlab/utils/spider.py +++ b/crawlab/utils/spider.py @@ -1,6 +1,10 @@ import os +from datetime import datetime, timedelta + +from bson import ObjectId from constants.spider import FILE_SUFFIX_LANG_MAPPING, LangType, SUFFIX_IGNORE, SpiderType +from constants.task import TaskStatus from db.manager import db_manager @@ -43,3 +47,25 @@ def get_spider_col_fields(col_name: str) -> list: for k in item.keys(): fields.add(k) return list(fields) + + +def get_last_n_run_errors_count(spider_id: ObjectId, n: int) -> int: + tasks = db_manager.list(col_name='tasks', + cond={'spider_id': spider_id}, + sort_key='create_ts', + limit=n) + count = 0 + for task in tasks: + if task['status'] == TaskStatus.FAILURE: + count += 1 + return count + + +def get_last_n_day_tasks_count(spider_id: ObjectId, n: int) -> int: + return db_manager.count(col_name='tasks', + cond={ + 'spider_id': spider_id, + 'create_ts': { + '$gte': (datetime.now() - timedelta(n)) + } + }) diff --git a/frontend/src/i18n/zh.js b/frontend/src/i18n/zh.js index 69147f67..13baddc7 100644 --- a/frontend/src/i18n/zh.js +++ b/frontend/src/i18n/zh.js @@ -86,6 +86,8 @@ export default { 'Variable': '变量', 'Value': '值', 'Add Environment Variables': '添加环境变量', + 'Last 7-Day Tasks': '最近7天任务数', + 'Last 5-Run Errors': '最近5次运行错误数', // 爬虫列表 'Name': '名称', @@ -117,6 +119,7 @@ export default { 'Schedule Name': '定时任务名称', 'Schedule Description': '定时任务描述', 'Parameters': '参数', + 'Add Schedule': '添加定时任务', // 文件 'Choose Folder': '选择文件', diff --git a/frontend/src/views/spider/SpiderList.vue b/frontend/src/views/spider/SpiderList.vue index 60785efe..14a9ffea 100644 --- a/frontend/src/views/spider/SpiderList.vue +++ b/frontend/src/views/spider/SpiderList.vue @@ -84,6 +84,17 @@ {{scope.row.lang}} + + + - +