diff --git a/frontend/src/components/Scrapy/SpiderScrapy.vue b/frontend/src/components/Scrapy/SpiderScrapy.vue
index 967b8a2c..73d05265 100644
--- a/frontend/src/components/Scrapy/SpiderScrapy.vue
+++ b/frontend/src/components/Scrapy/SpiderScrapy.vue
@@ -15,7 +15,7 @@
         icon="el-icon-plus"
         @click="onSettingsActiveParamAdd"
       >
-        {{ $t('Add') }}
+        {{$t('Add')}}
@@ -79,9 +79,9 @@
       width="480px"
     >
@@ -100,15 +100,15 @@
@@ -120,112 +120,123 @@
-        {{ $t('Add Variable') }}
-        {{ $t('Save') }}
+        {{$t('No data available')}}
+        {{$t('Add Variable')}}
+        {{$t('Save')}}
@@ -233,121 +244,147 @@
-        {{ $t('Add Spider') }}
+        {{$t('No data available. Please check whether your spiders are missing dependencies or no spiders created.')}}
+        {{$t('Add Spider')}}
+        {{s}}
-        {{ s }}
-        {{ $t('Add Item') }}
-        {{ $t('Save') }}
+        {{$t('No data available')}}
+        {{$t('No data available')}}
-        {{ s }}
+        {{s}}
@@ -368,295 +405,301 @@
diff --git a/frontend/src/store/modules/spider.js b/frontend/src/store/modules/spider.js
index c50cd14a..dc629216 100644
--- a/frontend/src/store/modules/spider.js
+++ b/frontend/src/store/modules/spider.js
@@ -19,6 +19,9 @@ const state = {
   // spider scrapy pipelines
   spiderScrapyPipelines: [],
 
+  // scrapy errors
+  spiderScrapyErrors: {},
+
   // node to deploy/run
   activeNode: {},
 
@@ -116,6 +119,11 @@ const mutations = {
   },
   SET_CONFIG_LIST_TS(state, value) {
     state.configListTs = value
+  },
+  SET_SPIDER_SCRAPY_ERRORS(state, value) {
+    for (const key in value) {
+      Vue.set(state.spiderScrapyErrors, key, value[key])
+    }
   }
 }
 
@@ -142,11 +150,20 @@ const actions = {
   },
   async getSpiderScrapySpiders({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/spiders`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { spiders: res.data.error })
+      return
+    }
     state.spiderForm.spider_names = res.data.data
     commit('SET_SPIDER_FORM', state.spiderForm)
+    commit('SET_SPIDER_SCRAPY_ERRORS', { spiders: '' })
   },
   async getSpiderScrapySettings({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/settings`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { settings: res.data.error })
+      return
+    }
     commit('SET_SPIDER_SCRAPY_SETTINGS', res.data.data.map(d => {
       const key = d.key
       const value = d.value
@@ -164,13 +181,17 @@ const actions = {
         type
       }
     }))
+    commit('SET_SPIDER_SCRAPY_ERRORS', { settings: '' })
   },
   async saveSpiderScrapySettings({ state }, id) {
-    return request.post(`/spiders/${id}/scrapy/settings`,
-      state.spiderScrapySettings)
+    return request.post(`/spiders/${id}/scrapy/settings`, state.spiderScrapySettings)
   },
   async getSpiderScrapyItems({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/items`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { items: res.data.error })
+      return
+    }
     let nodeId = 0
     commit('SET_SPIDER_SCRAPY_ITEMS', res.data.data.map(d => {
       d.id = nodeId++
@@ -187,27 +208,30 @@ const actions = {
       })
       return d
     }))
+    commit('SET_SPIDER_SCRAPY_ERRORS', { items: '' })
   },
   async saveSpiderScrapyItems({ state }, id) {
-    return request.post(`/spiders/${id}/scrapy/items`,
-      state.spiderScrapyItems.map(d => {
-        d.name = d.label
-        d.fields = d.children.map(f => f.label)
-        return d
-      }))
+    return request.post(`/spiders/${id}/scrapy/items`, state.spiderScrapyItems.map(d => {
+      d.name = d.label
+      d.fields = d.children.map(f => f.label)
+      return d
+    }))
   },
   async getSpiderScrapyPipelines({ state, commit }, id) {
     const res = await request.get(`/spiders/${id}/scrapy/pipelines`)
+    if (res.data.error) {
+      commit('SET_SPIDER_SCRAPY_ERRORS', { pipelines: res.data.error })
+      return
+    }
     commit('SET_SPIDER_SCRAPY_PIPELINES', res.data.data)
+    commit('SET_SPIDER_SCRAPY_ERRORS', { pipelines: '' })
   },
   async saveSpiderScrapyPipelines({ state }, id) {
-    return request.post(`/spiders/${id}/scrapy/pipelines`,
-      state.spiderScrapyPipelines)
+    return request.post(`/spiders/${id}/scrapy/pipelines`, state.spiderScrapyPipelines)
   },
   async getSpiderScrapySpiderFilepath({ state, commit }, payload) {
     const { id, spiderName } = payload
-    return request.get(`/spiders/${id}/scrapy/spider/filepath`,
-      { spider_name: spiderName })
+    return request.get(`/spiders/${id}/scrapy/spider/filepath`, { spider_name: spiderName })
   },
   addSpiderScrapySpider({ state }, payload) {
     const { id, form } = payload
@@ -270,13 +294,11 @@ const actions = {
     return request.post(`/spiders/${state.spiderForm._id}/extract_fields`)
   },
   postConfigSpiderConfig({ state }) {
-    return request.post(`/config_spiders/${state.spiderForm._id}/config`,
-      state.spiderForm.config)
+    return request.post(`/config_spiders/${state.spiderForm._id}/config`, state.spiderForm.config)
   },
   saveConfigSpiderSpiderfile({ state, rootState }) {
     const content = rootState.file.fileContent
-    return request.post(`/config_spiders/${state.spiderForm._id}/spiderfile`,
-      { content })
+    return request.post(`/config_spiders/${state.spiderForm._id}/spiderfile`, { content })
   },
   addConfigSpider({ state }) {
     return request.put(`/config_spiders`, state.spiderForm)
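
As a rough illustration only (not part of the patch itself), here is a minimal sketch of how a component such as SpiderScrapy.vue might consume the new spiderScrapyErrors state to decide between rendering its list and its "No data available" message. The component name ScrapySpidersPanel, the namespaced 'spider' module registration, and the route parameter are assumptions for the sketch, not taken from this diff.

// Hypothetical consumer of spiderScrapyErrors (illustration only, not the real SpiderScrapy.vue).
// Assumes the store module above is registered as a namespaced 'spider' module.
import { mapState } from 'vuex'

export default {
  name: 'ScrapySpidersPanel',
  computed: {
    ...mapState('spider', {
      spiderForm: state => state.spiderForm,
      scrapyErrors: state => state.spiderScrapyErrors
    }),
    // An empty string means the last fetch succeeded; anything else is the
    // message committed via SET_SPIDER_SCRAPY_ERRORS.
    spidersError () {
      return this.scrapyErrors.spiders
    },
    hasSpiders () {
      return !this.spidersError && (this.spiderForm.spider_names || []).length > 0
    }
  },
  async created () {
    // getSpiderScrapySpiders commits either the spider list or an error message.
    await this.$store.dispatch('spider/getSpiderScrapySpiders', this.$route.params.id)
  }
}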