From c4143399e58483a844d0b9bf5c305a7ea04fcf5e Mon Sep 17 00:00:00 2001
From: marvzhang
Date: Thu, 18 Jun 2020 12:54:12 +0800
Subject: [PATCH] added error notification for scrapy spiders

---
 backend/services/scrapy.go                  |  9 +--
 frontend/src/api/request.js                 |  2 +-
 .../src/components/Scrapy/SpiderScrapy.vue  | 64 ++++++++++++++++++---
 frontend/src/i18n/zh.js                     |  3 +
 frontend/src/store/modules/spider.js        | 30 ++++++++++
 frontend/src/utils/html.js                  | 16 ++++++
 frontend/src/utils/index.js                 |  4 +-
 7 files changed, 118 insertions(+), 10 deletions(-)
 create mode 100644 frontend/src/utils/html.js

diff --git a/backend/services/scrapy.go b/backend/services/scrapy.go
index 5b91c501..eee7893d 100644
--- a/backend/services/scrapy.go
+++ b/backend/services/scrapy.go
@@ -6,6 +6,7 @@ import (
 	"crawlab/entity"
 	"crawlab/model"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"github.com/Unknwon/goconfig"
 	"github.com/apex/log"
@@ -29,7 +30,7 @@ func GetScrapySpiderNames(s model.Spider) ([]string, error) {
 	if err := cmd.Run(); err != nil {
 		log.Errorf(err.Error())
 		debug.PrintStack()
-		return []string{}, err
+		return []string{}, errors.New(stderr.String())
 	}
 
 	spiderNames := strings.Split(stdout.String(), "\n")
@@ -56,7 +57,7 @@ func GetScrapySettings(s model.Spider) (res []map[string]interface{}, err error)
 		log.Errorf(err.Error())
 		log.Errorf(stderr.String())
 		debug.PrintStack()
-		return res, err
+		return res, errors.New(stderr.String())
 	}
 
 	if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {
@@ -147,7 +148,7 @@ func GetScrapyItems(s model.Spider) (res []map[string]interface{}, err error) {
 		log.Errorf(err.Error())
 		log.Errorf(stderr.String())
 		debug.PrintStack()
-		return res, err
+		return res, errors.New(stderr.String())
 	}
 
 	if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {
@@ -213,7 +214,7 @@ func GetScrapyPipelines(s model.Spider) (res []string, err error) {
 		log.Errorf(err.Error())
 		log.Errorf(stderr.String())
 		debug.PrintStack()
-		return res, err
+		return res, errors.New(stderr.String())
 	}
 
 	if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {
diff --git a/frontend/src/api/request.js b/frontend/src/api/request.js
index 3724de17..1b4cf2af 100644
--- a/frontend/src/api/request.js
+++ b/frontend/src/api/request.js
@@ -45,7 +45,7 @@ const request = (method, path, params, data, others = {}) => {
     if (response.status === 500) {
       Message.error(response.data.error)
     }
-    return e
+    return response
   })
 }
 
diff --git a/frontend/src/components/Scrapy/SpiderScrapy.vue b/frontend/src/components/Scrapy/SpiderScrapy.vue
index 784b2c44..cb2195ac 100644
--- a/frontend/src/components/Scrapy/SpiderScrapy.vue
+++ b/frontend/src/components/Scrapy/SpiderScrapy.vue
@@ -119,7 +119,16 @@
[The template and <script> hunks of SpiderScrapy.vue were garbled in
extraction: the HTML markup was stripped, leaving only stray +/- markers
and message text, so the exact markup is not recoverable. This hunk and
the three that follow add an empty-state notice to each tab of the
component, driven by the new spiderScrapyErrors store state and styled by
the .empty-text, .empty-text.error and .errors-label rules added below.
The Settings, Items and Pipelines tabs show {{$t('No data available')}};
the Spiders tab shows {{$t('No data available. Please check whether your
spiders are missing dependencies or no spiders created.')}} together with
an {{$t('Errors')}} block listing the captured stderr. The accompanying
<script> changes were lost as well.]
@@ -780,4 +821,19 @@ export default {
   .items >>> .custom-tree-node .el-input {
     width: 240px;
   }
+
+  .empty-text {
+    display: block;
+    margin-bottom: 20px;
+  }
+
+  .empty-text.error {
+    color: #f56c6c;
+  }
+
+  .errors-label {
+    color: #f56c6c;
+    display: block;
+    margin-bottom: 10px;
+  }
diff --git a/frontend/src/i18n/zh.js b/frontend/src/i18n/zh.js
index 308d9974..cce72808 100644
--- a/frontend/src/i18n/zh.js
+++ b/frontend/src/i18n/zh.js
@@ -37,6 +37,7 @@ export default {
   Running: '进行中',
   Finished: '已完成',
   Error: '错误',
+  Errors: '错误',
   NA: '未知',
   Cancelled: '已取消',
   Abnormal: '异常',
@@ -416,6 +417,8 @@
   'Disclaimer': '免责声明',
   'Please search dependencies': '请搜索依赖',
   'No Data': '暂无数据',
+  'No data available': '暂无数据',
+  'No data available. Please check whether your spiders are missing dependencies or no spiders created.': '暂无数据。请检查您的爬虫是否缺少依赖,或者没有创建爬虫。',
   'Show installed': '查看已安装',
   'Installing dependency successful': '安装依赖成功',
   'Installing dependency failed': '安装依赖失败',
diff --git a/frontend/src/store/modules/spider.js b/frontend/src/store/modules/spider.js
index b3b3a70d..0c70c7cc 100644
--- a/frontend/src/store/modules/spider.js
+++ b/frontend/src/store/modules/spider.js
@@ -19,6 +19,9 @@ const state = {
   // spider scrapy pipelines
   spiderScrapyPipelines: [],
 
+  // scrapy errors
+  spiderScrapyErrors: {},
+
   // node to deploy/run
   activeNode: {},
 
@@ -116,6 +119,13 @@ const mutations = {
   },
   SET_CONFIG_LIST_TS (state, value) {
     state.configListTs = value
+  },
+  SET_SPIDER_SCRAPY_ERRORS (state, value) {
+    for (let key in value) {
+      if (value.hasOwnProperty(key)) {
+        Vue.set(state.spiderScrapyErrors, key, value[key])
+      }
+    }
   }
 }
 
@@ -142,11 +152,20 @@ const actions = {
   },
   async getSpiderScrapySpiders ({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/spiders`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { spiders: res.data.error })
+      return
+    }
     state.spiderForm.spider_names = res.data.data
     commit('SET_SPIDER_FORM', state.spiderForm)
+    commit('SET_SPIDER_SCRAPY_ERRORS', { spiders: '' })
   },
   async getSpiderScrapySettings ({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/settings`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { settings: res.data.error })
+      return
+    }
     commit('SET_SPIDER_SCRAPY_SETTINGS', res.data.data.map(d => {
       const key = d.key
       const value = d.value
@@ -164,12 +183,17 @@
         type
       }
     }))
+    commit('SET_SPIDER_SCRAPY_ERRORS', { settings: '' })
   },
   async saveSpiderScrapySettings ({ state }, id) {
     return request.post(`/spiders/${id}/scrapy/settings`, state.spiderScrapySettings)
   },
   async getSpiderScrapyItems ({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/items`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { items: res.data.error })
+      return
+    }
     let nodeId = 0
     commit('SET_SPIDER_SCRAPY_ITEMS', res.data.data.map(d => {
       d.id = nodeId++
@@ -186,6 +210,7 @@
       })
       return d
     }))
+    commit('SET_SPIDER_SCRAPY_ERRORS', { items: '' })
   },
   async saveSpiderScrapyItems ({ state }, id) {
     return request.post(`/spiders/${id}/scrapy/items`, state.spiderScrapyItems.map(d => {
@@ -196,7 +221,12 @@
   },
   async getSpiderScrapyPipelines ({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/pipelines`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { pipelines: res.data.error })
+      return
+    }
     commit('SET_SPIDER_SCRAPY_PIPELINES', res.data.data)
+    commit('SET_SPIDER_SCRAPY_ERRORS', { pipelines: '' })
   },
   async saveSpiderScrapyPipelines ({ state }, id) {
     return request.post(`/spiders/${id}/scrapy/pipelines`, state.spiderScrapyPipelines)
diff --git a/frontend/src/utils/html.js b/frontend/src/utils/html.js
new file mode 100644
index 00000000..289fdda4
--- /dev/null
+++ b/frontend/src/utils/html.js
@@ -0,0 +1,16 @@
+export default {
+  htmlEscape: text => {
+    return text.replace(/[<>"&]/g, function (match, pos, originalText) {
+      switch (match) {
+        case '<':
+          return '&lt;'
+        case '>':
+          return '&gt;'
+        case '&':
+          return '&amp;'
+        case '"':
+          return '&quot;'
+      }
+    })
+  }
+}
diff --git a/frontend/src/utils/index.js b/frontend/src/utils/index.js
index 765d0e93..4c725c2b 100644
--- a/frontend/src/utils/index.js
+++ b/frontend/src/utils/index.js
@@ -4,6 +4,7 @@ import tour from './tour'
 import log from './log'
 import scrapy from './scrapy'
 import doc from './doc'
+import html from './html'
 
 export default {
   stats,
@@ -11,5 +12,6 @@ export default {
   tour,
   log,
   scrapy,
-  doc
+  doc,
+  html
 }
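
A minimal sketch of how the pieces above presumably compose on the component
side, whose own code was not recoverable: raw scrapy stderr stored in
spiderScrapyErrors is escaped with the new htmlEscape helper and only then
rendered as HTML. The function name formatScrapyErrors, the import path, and
the bullet-per-line formatting are illustrative assumptions, not part of the
commit:

// Illustrative sketch (not part of the patch): escape raw scrapy stderr
// before it is bound with v-html, so command output cannot inject markup,
// then render one bullet entry per non-empty line.
import utils from '../utils' // assumed path; utils/index.js exports { ..., html }

export function formatScrapyErrors (stderr) {
  return utils.html.htmlEscape(stderr) // '<' -> '&lt;', '&' -> '&amp;', etc.
    .split('\n')
    .filter(line => line.trim().length > 0)
    .map(line => '• ' + line)
    .join('<br>')
}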