From a441a7fddd77b9c670f6a46d3afc43545550aeeb Mon Sep 17 00:00:00 2001
From: Marvin Zhang
Date: Thu, 30 May 2019 13:09:15 +0800
Subject: [PATCH] download results

---
 CHANGELOG.md            | 3 ++-
 crawlab/routes/tasks.py | 7 +++++--
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9bd9c2ad..056d24d8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,7 @@
 # 0.2.2 (unreleased)
 ### Features / Enhancement
-- **Automatic Extract Fields**: Automatically extracting data fields in list pages for configurable spider
+- **Automatic Extract Fields**: Automatically extracting data fields in list pages for configurable spider.
+- **Download Results**: Allow downloading results as csv file.
 - **Baidu Tongji**: Allow users to choose to report usage info to Baidu Tongji.
 
 ### Bug Fixes
diff --git a/crawlab/routes/tasks.py b/crawlab/routes/tasks.py
index fab2457a..5b5bf640 100644
--- a/crawlab/routes/tasks.py
+++ b/crawlab/routes/tasks.py
@@ -223,6 +223,9 @@ class TaskApi(BaseApi):
         col_name = spider.get('col')
         if not col_name:
             return send_csv([], f'results_{col_name}_{round(time())}.csv')
-        items = db_manager.list(col_name, {'task_id': id})
+        items = db_manager.list(col_name, {'task_id': id}, limit=999999999)
         fields = get_spider_col_fields(col_name)
-        return send_csv(items, filename=f'results_{col_name}_{round(time())}.csv', fields=fields, encoding='utf-8')
+        return send_csv(items,
+                        filename=f'results_{col_name}_{round(time())}.csv',
+                        fields=fields,
+                        encoding='utf-8')
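
For reference, below is a minimal standalone sketch (not part of the patch) of the send_csv pattern the download-results change relies on. The send_csv import is assumed to come from the flask-csv package; the Flask app, route path, and sample data are hypothetical stand-ins for Crawlab's db_manager.list and get_spider_col_fields calls, while the filename, fields, and encoding arguments mirror the call added in the patch.

    # Hypothetical sketch: stream a list of dicts as a CSV download,
    # mirroring the send_csv(...) call added in crawlab/routes/tasks.py.
    from time import time

    from flask import Flask
    from flask_csv import send_csv  # assumed import; PyPI package: flask-csv

    app = Flask(__name__)

    @app.route('/demo/results')
    def download_results():
        # Stand-in for db_manager.list(col_name, {'task_id': id}, limit=...)
        items = [
            {'title': 'Item 1', 'url': 'http://example.com/1'},
            {'title': 'Item 2', 'url': 'http://example.com/2'},
        ]
        # Stand-in for get_spider_col_fields(col_name)
        fields = ['title', 'url']
        return send_csv(items,
                        filename=f'results_demo_{round(time())}.csv',
                        fields=fields,
                        encoding='utf-8')

The explicit fields list keeps the CSV column order stable even when individual result documents are missing some keys, which is why the patch passes fields from get_spider_col_fields rather than letting send_csv infer columns from the first row.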