Mirror of https://github.com/crawlab-team/crawlab.git
Commit: Add scrapy spider navigation
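Summary: this commit wires up navigation from a Scrapy spider's name to its source file. The backend gains a GET /spiders/:id/scrapy/spider/filepath route whose handler shells out to `crawlab find_spider_filepath -n <spider_name>` inside the spider's source directory; the frontend adds a matching Vuex action, makes spider names in the Scrapy settings view clickable, and on click opens the Files tab and expands the file tree to the returned filepath.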
@@ -155,34 +155,35 @@ func main() {
 	}
 	// Spiders
 	{
 		authGroup.GET("/spiders", routes.GetSpiderList)                                            // spider list
 		authGroup.GET("/spiders/:id", routes.GetSpider)                                            // spider detail
 		authGroup.PUT("/spiders", routes.PutSpider)                                                // create spider
 		authGroup.POST("/spiders", routes.UploadSpider)                                            // upload spider
 		authGroup.POST("/spiders/:id", routes.PostSpider)                                          // update spider
 		authGroup.POST("/spiders/:id/publish", routes.PublishSpider)                               // publish spider
 		authGroup.POST("/spiders/:id/upload", routes.UploadSpiderFromId)                           // upload spider (by ID)
 		authGroup.DELETE("/spiders/:id", routes.DeleteSpider)                                      // delete spider
 		authGroup.GET("/spiders/:id/tasks", routes.GetSpiderTasks)                                 // spider task list
 		authGroup.GET("/spiders/:id/file/tree", routes.GetSpiderFileTree)                          // read spider file tree
 		authGroup.GET("/spiders/:id/file", routes.GetSpiderFile)                                   // read spider file
 		authGroup.POST("/spiders/:id/file", routes.PostSpiderFile)                                 // update spider file
 		authGroup.PUT("/spiders/:id/file", routes.PutSpiderFile)                                   // create spider file
 		authGroup.PUT("/spiders/:id/dir", routes.PutSpiderDir)                                     // create spider directory
 		authGroup.DELETE("/spiders/:id/file", routes.DeleteSpiderFile)                             // delete spider file
 		authGroup.POST("/spiders/:id/file/rename", routes.RenameSpiderFile)                        // rename spider file
 		authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir)                                     // spider directory
 		authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats)                                 // spider stats
 		authGroup.GET("/spiders/:id/schedules", routes.GetSpiderSchedules)                         // spider schedules
 		authGroup.GET("/spiders/:id/scrapy/spiders", routes.GetSpiderScrapySpiders)                // Scrapy spider name list
 		authGroup.PUT("/spiders/:id/scrapy/spiders", routes.PutSpiderScrapySpiders)                // create Scrapy spider
 		authGroup.GET("/spiders/:id/scrapy/settings", routes.GetSpiderScrapySettings)              // Scrapy settings
 		authGroup.POST("/spiders/:id/scrapy/settings", routes.PostSpiderScrapySettings)            // update Scrapy settings
 		authGroup.GET("/spiders/:id/scrapy/items", routes.GetSpiderScrapyItems)                    // Scrapy items
 		authGroup.POST("/spiders/:id/scrapy/items", routes.PostSpiderScrapyItems)                  // update Scrapy items
 		authGroup.GET("/spiders/:id/scrapy/pipelines", routes.GetSpiderScrapyPipelines)            // Scrapy pipelines
+		authGroup.GET("/spiders/:id/scrapy/spider/filepath", routes.GetSpiderScrapySpiderFilepath) // Scrapy spider filepath
 		authGroup.POST("/spiders/:id/git/sync", routes.PostSpiderSyncGit)                          // spider Git sync
 		authGroup.POST("/spiders/:id/git/reset", routes.PostSpiderResetGit)                        // spider Git reset
 	}
 	// Configurable spiders
 	{
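For reference, the new endpoint can be exercised directly over HTTP. Below is a minimal sketch of such a call, assuming the API is served under http://localhost:8000/api and that the authGroup routes expect a JWT in the Authorization header; the base URL, spider ID, spider name, and token are illustrative placeholders, not values from this commit:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	base := "http://localhost:8000/api"                // assumed API prefix
	spiderID := "5d429e6c19f7abede924fee2"             // hypothetical ObjectId hex
	q := url.Values{"spider_name": {"quotes"}}         // hypothetical Scrapy spider name

	// Build the request against the route added in this commit.
	req, err := http.NewRequest("GET",
		fmt.Sprintf("%s/spiders/%s/scrapy/spider/filepath?%s", base, spiderID, q.Encode()), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "<jwt-token>") // auth scheme assumed

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// Expected envelope per the handler below:
	// {"status":"ok","message":"success","data":"<filepath>"}
	fmt.Println(resp.Status, string(body))
}
```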
@@ -1152,6 +1152,39 @@ func GetSpiderScrapyPipelines(c *gin.Context) {
 	})
 }
 
+func GetSpiderScrapySpiderFilepath(c *gin.Context) {
+	id := c.Param("id")
+
+	spiderName := c.Query("spider_name")
+	if spiderName == "" {
+		HandleErrorF(http.StatusBadRequest, c, "spider_name is empty")
+		return
+	}
+
+	if !bson.IsObjectIdHex(id) {
+		HandleErrorF(http.StatusBadRequest, c, "spider_id is invalid")
+		return
+	}
+
+	spider, err := model.GetSpider(bson.ObjectIdHex(id))
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	data, err := services.GetScrapySpiderFilepath(spider, spiderName)
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, Response{
+		Status:  "ok",
+		Message: "success",
+		Data:    data,
+	})
+}
+
 func PostSpiderSyncGit(c *gin.Context) {
 	id := c.Param("id")
 
@@ -225,6 +225,26 @@ func GetScrapyPipelines(s model.Spider) (res []string, err error) {
 	return res, nil
 }
 
+func GetScrapySpiderFilepath(s model.Spider, spiderName string) (res string, err error) {
+	var stdout bytes.Buffer
+	var stderr bytes.Buffer
+
+	cmd := exec.Command("crawlab", "find_spider_filepath", "-n", spiderName)
+	cmd.Dir = s.Src
+	cmd.Stdout = &stdout
+	cmd.Stderr = &stderr
+	if err := cmd.Run(); err != nil {
+		log.Errorf(err.Error())
+		log.Errorf(stderr.String())
+		debug.PrintStack()
+		return res, err
+	}
+
+	res = strings.Replace(stdout.String(), "\n", "", 1)
+
+	return res, nil
+}
+
 func CreateScrapySpider(s model.Spider, name string, domain string, template string) (err error) {
 	var stdout bytes.Buffer
 	var stderr bytes.Buffer
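Note that the service cleans the CLI output with strings.Replace(stdout.String(), "\n", "", 1), which removes only the first newline and leaves any other surrounding whitespace intact. A standalone sketch (not part of the commit; the path is a made-up example) contrasting that with strings.TrimSpace, the more common idiom for one-line command output:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Typical one-line CLI output: payload plus a trailing newline.
	out := "myproject/spiders/quotes.py\n"

	// The commit's approach: remove the first newline occurrence.
	fmt.Printf("%q\n", strings.Replace(out, "\n", "", 1)) // "myproject/spiders/quotes.py"

	// With a CRLF line ending, Replace leaves the carriage return behind:
	noisy := "myproject/spiders/quotes.py\r\n"
	fmt.Printf("%q\n", strings.Replace(noisy, "\n", "", 1)) // "myproject/spiders/quotes.py\r"

	// strings.TrimSpace strips all leading/trailing whitespace in one call.
	fmt.Printf("%q\n", strings.TrimSpace(noisy)) // "myproject/spiders/quotes.py"
}
```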
@@ -410,6 +410,13 @@ export default {
       this.showFile = false
       this.$st.sendEv('爬虫详情', '文件', '删除')
     },
+    clickSpider (filepath) {
+      const node = this.$refs['tree'].getNode(filepath)
+      const data = node.data
+      this.onFileClick(data)
+      node.parent.expanded = true
+      node.parent.parent.expanded = true
+    },
     clickPipeline () {
       const filename = 'pipelines.py'
       for (let i = 0; i < this.computedFileTree.length; i++) {
@@ -417,9 +424,9 @@ export default {
         const nodeLv1 = this.$refs['tree'].getNode(dataLv1.path)
         if (dataLv1.is_dir) {
           for (let j = 0; j < dataLv1.children.length; j++) {
-            const nodeLv2 = dataLv1.children[j]
-            if (nodeLv2.path.match(filename)) {
-              this.onFileClick(nodeLv2)
+            const dataLv2 = dataLv1.children[j]
+            if (dataLv2.path.match(filename)) {
+              this.onFileClick(dataLv2)
               nodeLv1.expanded = true
               return
             }
@@ -248,6 +248,7 @@
           v-for="s in spiderForm.spider_names"
           :key="s"
           class="item"
+          @click="onClickSpider(s)"
         >
           <i class="el-icon-star-on"></i>
           {{s}}
@@ -634,6 +635,14 @@ export default {
         this.$message.success(this.$t('Saved successfully'))
       }
       this.$st.sendEv('爬虫详情', 'Scrapy 设置', '保存Items')
     },
+    async onClickSpider (spiderName) {
+      const res = await this.$store.dispatch('spider/getSpiderScrapySpiderFilepath', {
+        id: this.$route.params.id,
+        spiderName
+      })
+      this.$emit('click-spider', res.data.data)
+      this.$st.sendEv('爬虫详情', 'Scrapy 设置', '点击爬虫')
+    }
   }
 }
@@ -195,6 +195,10 @@ const actions = {
   async saveSpiderScrapyPipelines ({ state }, id) {
     return request.post(`/spiders/${id}/scrapy/pipelines`, state.spiderScrapyPipelines)
   },
+  async getSpiderScrapySpiderFilepath ({ state, commit }, payload) {
+    const { id, spiderName } = payload
+    return request.get(`/spiders/${id}/scrapy/spider/filepath`, { spider_name: spiderName })
+  },
   addSpiderScrapySpider ({ state }, payload) {
     const { id, form } = payload
     return request.put(`/spiders/${id}/scrapy/spiders`, form)
@@ -234,16 +234,12 @@ export default {
       this.$store.dispatch('spider/getSpiderScrapyPipelines', this.$route.params.id)
     ])
   },
-  async onClickScrapySpider () {
-    this.redirectType = 'spider'
+  async onClickScrapySpider (filepath) {
     this.activeTabName = 'files'
     await this.$store.dispatch('spider/getFileTree')
-    if (this.currentPath) {
-      await this.$store.dispatch('file/getFileContent', { path: this.currentPath })
-    }
+    this.$refs['file-list'].clickSpider(filepath)
   },
   async onClickScrapyPipeline () {
-    this.redirectType = 'pipeline'
     this.activeTabName = 'files'
     await this.$store.dispatch('spider/getFileTree')
     this.$refs['file-list'].clickPipeline()
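Taken together, the frontend flow reads: clicking a spider name in the Scrapy settings panel dispatches spider/getSpiderScrapySpiderFilepath and emits click-spider with the returned filepath; the spider detail page handles that event in onClickScrapySpider(filepath), switches to the Files tab, reloads the file tree, and delegates to the file list's clickSpider(filepath), which opens the file and expands its parent directories. The previous redirectType bookkeeping becomes unnecessary and is removed.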