加入scrapy pipelines支持

This commit is contained in:
marvzhang
2020-02-21 17:12:57 +08:00
parent 0ce810da20
commit d7437c4eb1
6 changed files with 93 additions and 9 deletions

View File

@@ -180,6 +180,7 @@ func main() {
authGroup.POST("/spiders/:id/scrapy/settings", routes.PostSpiderScrapySettings) // Scrapy 爬虫修改设置
authGroup.GET("/spiders/:id/scrapy/items", routes.GetSpiderScrapyItems) // Scrapy 爬虫 items
authGroup.POST("/spiders/:id/scrapy/items", routes.PostSpiderScrapyItems) // Scrapy 爬虫修改 items
authGroup.GET("/spiders/:id/scrapy/pipelines", routes.GetSpiderScrapyPipelines) // Scrapy 爬虫 pipelines
authGroup.POST("/spiders/:id/git/sync", routes.PostSpiderSyncGit) // 爬虫 Git 同步
authGroup.POST("/spiders/:id/git/reset", routes.PostSpiderResetGit) // 爬虫 Git 重置
}

View File

@@ -1125,6 +1125,33 @@ func PostSpiderScrapyItems(c *gin.Context) {
})
}
// GetSpiderScrapyPipelines handles GET /spiders/:id/scrapy/pipelines.
// It validates the spider id, loads the spider, asks the services layer
// for its Scrapy pipeline list, and writes the standard JSON response.
func GetSpiderScrapyPipelines(c *gin.Context) {
	spiderID := c.Param("id")

	// Reject malformed ObjectId hex strings up front.
	if !bson.IsObjectIdHex(spiderID) {
		HandleErrorF(http.StatusBadRequest, c, "spider_id is invalid")
		return
	}

	spider, err := model.GetSpider(bson.ObjectIdHex(spiderID))
	if err != nil {
		HandleError(http.StatusInternalServerError, c, err)
		return
	}

	pipelines, err := services.GetScrapyPipelines(spider)
	if err != nil {
		HandleError(http.StatusInternalServerError, c, err)
		return
	}

	c.JSON(http.StatusOK, Response{
		Status:  "ok",
		Message: "success",
		Data:    pipelines,
	})
}
func PostSpiderSyncGit(c *gin.Context) {
id := c.Param("id")

View File

@@ -201,6 +201,30 @@ func SaveScrapyItems(s model.Spider, itemsData []entity.ScrapyItem) (err error)
return
}
// GetScrapyPipelines returns the spider's Scrapy pipeline names by running
// the "crawlab pipelines" CLI helper in the spider's source directory and
// decoding the JSON array it prints to stdout.
//
// On failure the command's stderr is logged alongside the error and a stack
// trace is printed; res is returned as-is (nil) with the non-nil error.
func GetScrapyPipelines(s model.Spider) (res []string, err error) {
	var stdout bytes.Buffer
	var stderr bytes.Buffer
	cmd := exec.Command("crawlab", "pipelines")
	cmd.Dir = s.Src // run inside the spider's source tree so the CLI finds the project
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	// Assign to the named return (=, not :=) to avoid shadowing err.
	if err = cmd.Run(); err != nil {
		log.Errorf(err.Error())
		log.Errorf(stderr.String())
		debug.PrintStack()
		return res, err
	}
	// Unmarshal straight from the buffer's bytes; the original
	// []byte(stdout.String()) form copied the output twice for no gain.
	if err = json.Unmarshal(stdout.Bytes(), &res); err != nil {
		log.Errorf(err.Error())
		debug.PrintStack()
		return res, err
	}
	return res, nil
}
func CreateScrapySpider(s model.Spider, name string, domain string, template string) (err error) {
var stdout bytes.Buffer
var stderr bytes.Buffer

View File

@@ -243,13 +243,13 @@
{{$t('Add Spider')}}
</el-button>
</div>
<ul class="spider-list">
<ul class="list">
<li
v-for="s in spiderForm.spider_names"
:key="s"
class="spider-item"
class="item"
>
<i class="el-icon-caret-right"></i>
<i class="el-icon-star-on"></i>
{{s}}
</li>
</ul>
@@ -280,6 +280,7 @@
<span class="custom-tree-node" :class="`level-${data.level}`" slot-scope="{ node, data }">
<template v-if="data.level === 1">
<span v-if="!node.isEdit" class="label" @click="onItemLabelEdit(node, data, $event)">
<i class="el-icon-star-on"></i>
{{ data.label }}
<i class="el-icon-edit"></i>
</span>
@@ -311,6 +312,7 @@
</template>
<template v-if="data.level === 2">
<span v-if="!node.isEdit" class="label" @click="onItemLabelEdit(node, data, $event)">
<i class="el-icon-arrow-right"></i>
{{ node.label }}
<i class="el-icon-edit"></i>
</span>
@@ -341,7 +343,18 @@
<!--pipelines-->
<el-tab-pane label="Pipelines" name="pipelines">
<div class="pipelines">
<ul class="list">
<li
v-for="s in spiderScrapyPipelines"
:key="s"
class="item"
>
<i class="el-icon-star-on"></i>
{{s}}
</li>
</ul>
</div>
</el-tab-pane>
<!--./pipelines-->
</el-tabs>
@@ -359,7 +372,8 @@ export default {
...mapState('spider', [
'spiderForm',
'spiderScrapySettings',
'spiderScrapyItems'
'spiderScrapyItems',
'spiderScrapyPipelines'
]),
activeParamData () {
if (this.activeParam.type === 'array') {
@@ -688,18 +702,22 @@ export default {
border-bottom: 1px solid #DCDFE6;
}
.spiders .spider-list {
.pipelines .list,
.spiders .list {
list-style: none;
padding: 0;
margin: 0;
}
.spiders .spider-list .spider-item {
.pipelines .list .item,
.spiders .list .item {
font-size: 14px;
padding: 10px;
cursor: pointer;
}
.spiders .spider-list .spider-item:hover {
.pipelines .list .item:hover,
.spiders .list .item:hover {
background: #F5F7FA;
}

View File

@@ -16,6 +16,9 @@ const state = {
// spider scrapy items
spiderScrapyItems: [],
// spider scrapy pipelines
spiderScrapyPipelines: [],
// node to deploy/run
activeNode: {},
@@ -104,6 +107,9 @@ const mutations = {
},
SET_SPIDER_SCRAPY_ITEMS (state, value) {
state.spiderScrapyItems = value
},
SET_SPIDER_SCRAPY_PIPELINES (state, value) {
state.spiderScrapyPipelines = value
}
}
@@ -182,6 +188,13 @@ const actions = {
return d
}))
},
async getSpiderScrapyPipelines ({ state, commit }, id) {
const res = await request.get(`/spiders/${id}/scrapy/pipelines`)
commit('SET_SPIDER_SCRAPY_PIPELINES', res.data.data)
},
// Persist the current spiderScrapyPipelines state for spider `id`.
// NOTE(review): this commit only registers a GET route for
// /spiders/:id/scrapy/pipelines in main.go — no POST handler is visible,
// so this request appears to have no backend endpoint yet. Verify the
// POST route exists (or is added) before relying on this action.
async saveSpiderScrapyPipelines ({ state }, id) {
return request.post(`/spiders/${id}/scrapy/pipelines`, state.spiderScrapyPipelines)
},
addSpiderScrapySpider ({ state }, payload) {
const { id, form } = payload
return request.put(`/spiders/${id}/scrapy/spiders`, form)

View File

@@ -224,7 +224,8 @@ export default {
await Promise.all([
this.$store.dispatch('spider/getSpiderScrapySpiders', this.$route.params.id),
this.$store.dispatch('spider/getSpiderScrapyItems', this.$route.params.id),
this.$store.dispatch('spider/getSpiderScrapySettings', this.$route.params.id)
this.$store.dispatch('spider/getSpiderScrapySettings', this.$route.params.id),
this.$store.dispatch('spider/getSpiderScrapyPipelines', this.$route.params.id)
])
}
},