Added error notification for Scrapy spiders

This commit is contained in:
marvzhang
2020-06-18 12:54:12 +08:00
parent 7551f28b71
commit c4143399e5
7 changed files with 118 additions and 10 deletions

View File

@@ -6,6 +6,7 @@ import (
"crawlab/entity"
"crawlab/model"
"encoding/json"
"errors"
"fmt"
"github.com/Unknwon/goconfig"
"github.com/apex/log"
@@ -29,7 +30,7 @@ func GetScrapySpiderNames(s model.Spider) ([]string, error) {
if err := cmd.Run(); err != nil {
log.Errorf(err.Error())
debug.PrintStack()
return []string{}, err
return []string{}, errors.New(stderr.String())
}
spiderNames := strings.Split(stdout.String(), "\n")
@@ -56,7 +57,7 @@ func GetScrapySettings(s model.Spider) (res []map[string]interface{}, err error)
log.Errorf(err.Error())
log.Errorf(stderr.String())
debug.PrintStack()
return res, err
return res, errors.New(stderr.String())
}
if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {
@@ -147,7 +148,7 @@ func GetScrapyItems(s model.Spider) (res []map[string]interface{}, err error) {
log.Errorf(err.Error())
log.Errorf(stderr.String())
debug.PrintStack()
return res, err
return res, errors.New(stderr.String())
}
if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {
@@ -213,7 +214,7 @@ func GetScrapyPipelines(s model.Spider) (res []string, err error) {
log.Errorf(err.Error())
log.Errorf(stderr.String())
debug.PrintStack()
return res, err
return res, errors.New(stderr.String())
}
if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {

View File

@@ -45,7 +45,7 @@ const request = (method, path, params, data, others = {}) => {
if (response.status === 500) {
Message.error(response.data.error)
}
return e
return response
})
}

View File

@@ -119,7 +119,16 @@
>
<!--settings-->
<el-tab-pane :label="$t('Settings')" name="settings">
<div class="settings">
<div v-if="!spiderScrapySettings || !spiderScrapySettings.length" class="settings">
<span class="empty-text">
{{$t('No data available')}}
</span>
<template v-if="spiderScrapyErrors.settings">
<label class="errors-label">{{$t('Errors')}}:</label>
<el-alert type="error" v-html="getScrapyErrors('settings')"/>
</template>
</div>
<div v-else class="settings">
<div class="top-action-wrapper">
<el-button
type="primary"
@@ -232,7 +241,16 @@
<!--spiders-->
<el-tab-pane :label="$t('Spiders')" name="spiders">
<div class="spiders">
<div v-if="!spiderForm.spider_names || !spiderForm.spider_names.length" class="spiders">
<span class="empty-text error">
{{$t('No data available. Please check whether your spiders are missing dependencies or no spiders created.')}}
</span>
<template v-if="spiderScrapyErrors.spiders">
<label class="errors-label">{{$t('Errors')}}:</label>
<el-alert type="error" v-html="getScrapyErrors('spiders')"/>
</template>
</div>
<div v-else class="spiders">
<div class="action-wrapper">
<el-button
type="primary"
@@ -261,7 +279,16 @@
<!--items-->
<el-tab-pane label="Items" name="items">
<div class="items">
<div v-if="!spiderScrapyItems || !spiderScrapyItems.length" class="items">
<span class="empty-text">
{{$t('No data available')}}
</span>
<template v-if="spiderScrapyErrors.items">
<label class="errors-label">{{$t('Errors')}}:</label>
<el-alert type="error" v-html="getScrapyErrors('items')"/>
</template>
</div>
<div v-else class="items">
<div class="action-wrapper">
<el-button
type="primary"
@@ -345,6 +372,15 @@
<!--pipelines-->
<el-tab-pane label="Pipelines" name="pipelines">
<div v-if="!spiderScrapyPipelines || !spiderScrapyPipelines.length" class="pipelines">
<span class="empty-text">
{{$t('No data available')}}
</span>
<template v-if="spiderScrapyErrors.pipelines">
<label class="errors-label">{{$t('Errors')}}:</label>
<el-alert type="error" v-html="getScrapyErrors('pipelines')"/>
</template>
</div>
<div class="pipelines">
<ul class="list">
<li
@@ -376,7 +412,8 @@ export default {
'spiderForm',
'spiderScrapySettings',
'spiderScrapyItems',
'spiderScrapyPipelines'
'spiderScrapyPipelines',
'spiderScrapyErrors'
]),
activeParamData () {
if (this.activeParam.type === 'array') {
@@ -651,6 +688,10 @@ export default {
this.$set(this.loadingDict, spiderName, false)
}
this.$st.sendEv('爬虫详情', 'Scrapy 设置', '点击爬虫')
},
getScrapyErrors (type) {
if (!this.spiderScrapyErrors || !this.spiderScrapyErrors[type] || (typeof this.spiderScrapyErrors[type] !== 'string')) return ''
return this.$utils.html.htmlEscape(this.spiderScrapyErrors[type]).split('\n').join('<br/>')
}
}
}
@@ -780,4 +821,19 @@ export default {
.items >>> .custom-tree-node .el-input {
width: 240px;
}
.empty-text {
display: block;
margin-bottom: 20px;
}
.empty-text.error {
color: #f56c6c;
}
.errors-label {
color: #f56c6c;
display: block;
margin-bottom: 10px;
}
</style>

View File

@@ -37,6 +37,7 @@ export default {
Running: '进行中',
Finished: '已完成',
Error: '错误',
Errors: '错误',
NA: '未知',
Cancelled: '已取消',
Abnormal: '异常',
@@ -416,6 +417,8 @@ export default {
'Disclaimer': '免责声明',
'Please search dependencies': '请搜索依赖',
'No Data': '暂无数据',
'No data available': '暂无数据',
'No data available. Please check whether your spiders are missing dependencies or no spiders created.': '暂无数据请检查您的爬虫是否缺少依赖或者没有创建爬虫',
'Show installed': '查看已安装',
'Installing dependency successful': '安装依赖成功',
'Installing dependency failed': '安装依赖失败',

View File

@@ -19,6 +19,9 @@ const state = {
// spider scrapy pipelines
spiderScrapyPipelines: [],
// scrapy errors
spiderScrapyErrors: {},
// node to deploy/run
activeNode: {},
@@ -116,6 +119,13 @@ const mutations = {
},
SET_CONFIG_LIST_TS (state, value) {
state.configListTs = value
},
SET_SPIDER_SCRAPY_ERRORS (state, value) {
for (let key in value) {
if (value.hasOwnProperty(key)) {
Vue.set(state.spiderScrapyErrors, key, value[key])
}
}
}
}
@@ -142,11 +152,20 @@ const actions = {
},
async getSpiderScrapySpiders ({ state, commit }, id) {
const res = await request.get(`/spiders/${id}/scrapy/spiders`)
if (res.data.error) {
commit('SET_SPIDER_SCRAPY_ERRORS', { spiders: res.data.error })
return
}
state.spiderForm.spider_names = res.data.data
commit('SET_SPIDER_FORM', state.spiderForm)
commit('SET_SPIDER_SCRAPY_ERRORS', { spiders: '' })
},
async getSpiderScrapySettings ({ state, commit }, id) {
const res = await request.get(`/spiders/${id}/scrapy/settings`)
if (res.data.error) {
commit('SET_SPIDER_SCRAPY_ERRORS', { settings: res.data.error })
return
}
commit('SET_SPIDER_SCRAPY_SETTINGS', res.data.data.map(d => {
const key = d.key
const value = d.value
@@ -164,12 +183,17 @@ const actions = {
type
}
}))
commit('SET_SPIDER_SCRAPY_ERRORS', { settings: '' })
},
async saveSpiderScrapySettings ({ state }, id) {
return request.post(`/spiders/${id}/scrapy/settings`, state.spiderScrapySettings)
},
async getSpiderScrapyItems ({ state, commit }, id) {
const res = await request.get(`/spiders/${id}/scrapy/items`)
if (res.data.error) {
commit('SET_SPIDER_SCRAPY_ERRORS', { items: res.data.error })
return
}
let nodeId = 0
commit('SET_SPIDER_SCRAPY_ITEMS', res.data.data.map(d => {
d.id = nodeId++
@@ -186,6 +210,7 @@ const actions = {
})
return d
}))
commit('SET_SPIDER_SCRAPY_ERRORS', { items: '' })
},
async saveSpiderScrapyItems ({ state }, id) {
return request.post(`/spiders/${id}/scrapy/items`, state.spiderScrapyItems.map(d => {
@@ -196,7 +221,12 @@ const actions = {
},
async getSpiderScrapyPipelines ({ state, commit }, id) {
const res = await request.get(`/spiders/${id}/scrapy/pipelines`)
if (res.data.error) {
commit('SET_SPIDER_SCRAPY_ERRORS', { pipelines: res.data.error })
return
}
commit('SET_SPIDER_SCRAPY_PIPELINES', res.data.data)
commit('SET_SPIDER_SCRAPY_ERRORS', { pipelines: '' })
},
async saveSpiderScrapyPipelines ({ state }, id) {
return request.post(`/spiders/${id}/scrapy/pipelines`, state.spiderScrapyPipelines)

View File

@@ -0,0 +1,16 @@
// Mapping of HTML special characters to their entity-escaped forms.
// Kept in sync with the regex below: escaping is done in a single pass,
// so already-produced entities (e.g. the '&' in '&lt;') are never re-escaped.
const ESCAPE_MAP = {
  '<': '&lt;',
  '>': '&gt;',
  '&': '&amp;',
  '"': '&quot;'
}

const html = {
  /**
   * Escape HTML special characters (<, >, &, ") in a string so it can be
   * safely inserted into markup (e.g. via v-html after converting newlines).
   * @param {string} text - raw text to escape
   * @returns {string} the text with special characters replaced by entities
   */
  htmlEscape: text => text.replace(/[<>"&]/g, match => ESCAPE_MAP[match])
}

export default html

View File

@@ -4,6 +4,7 @@ import tour from './tour'
import log from './log'
import scrapy from './scrapy'
import doc from './doc'
import html from './html'
export default {
stats,
@@ -11,5 +12,6 @@ export default {
tour,
log,
scrapy,
doc
doc,
html
}