Mirror of https://github.com/crawlab-team/crawlab.git
Add Scrapy settings backend
@@ -154,26 +154,27 @@ func main() {
 	}
 	// Spiders
 	{
 		authGroup.GET("/spiders", routes.GetSpiderList) // spider list
 		authGroup.GET("/spiders/:id", routes.GetSpider) // spider detail
 		authGroup.PUT("/spiders", routes.PutSpider) // add spider
 		authGroup.POST("/spiders", routes.UploadSpider) // upload spider
 		authGroup.POST("/spiders/:id", routes.PostSpider) // update spider
 		authGroup.POST("/spiders/:id/publish", routes.PublishSpider) // publish spider
 		authGroup.POST("/spiders/:id/upload", routes.UploadSpiderFromId) // upload spider (by ID)
 		authGroup.DELETE("/spiders/:id", routes.DeleteSpider) // delete spider
 		authGroup.GET("/spiders/:id/tasks", routes.GetSpiderTasks) // spider task list
 		authGroup.GET("/spiders/:id/file/tree", routes.GetSpiderFileTree) // read spider file tree
 		authGroup.GET("/spiders/:id/file", routes.GetSpiderFile) // read spider file
 		authGroup.POST("/spiders/:id/file", routes.PostSpiderFile) // update spider file
 		authGroup.PUT("/spiders/:id/file", routes.PutSpiderFile) // create spider file
 		authGroup.PUT("/spiders/:id/dir", routes.PutSpiderDir) // create spider directory
 		authGroup.DELETE("/spiders/:id/file", routes.DeleteSpiderFile) // delete spider file
 		authGroup.POST("/spiders/:id/file/rename", routes.RenameSpiderFile) // rename spider file
 		authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir) // spider directory
 		authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats) // spider stats
 		authGroup.GET("/spiders/:id/schedules", routes.GetSpiderSchedules) // spider schedules
 		authGroup.GET("/spiders/:id/scrapy/spiders", routes.GetSpiderScrapySpiders) // Scrapy spider name list
+		authGroup.GET("/spiders/:id/scrapy/settings", routes.GetSpiderScrapySettings) // Scrapy spider settings
 	}
 	// Configurable spiders
 	{
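As a quick sanity check of the new route, the endpoint can be called like any other authenticated spider endpoint. The minimal client sketch below is illustrative only: the host/port, spider ID, and Authorization header value are placeholders that depend on the deployment, and the response shape assumes the standard {status, message, data} envelope used by the other handlers.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder host/port and spider ID; adjust to your Crawlab deployment.
	url := "http://localhost:8000/spiders/5d5e0d50c0a9c70001234567/scrapy/settings"

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	// The route sits in authGroup, so an auth token is expected; the header value here is a placeholder.
	req.Header.Set("Authorization", "<jwt-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body)) // expected: {"status":"ok","message":"success","data":[...]}
}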
@@ -929,3 +929,30 @@ func GetSpiderScrapySpiders(c *gin.Context) {
 		Data:    spiderNames,
 	})
 }
+
+func GetSpiderScrapySettings(c *gin.Context) {
+	id := c.Param("id")
+
+	if !bson.IsObjectIdHex(id) {
+		HandleErrorF(http.StatusBadRequest, c, "spider_id is invalid")
+		return
+	}
+
+	spider, err := model.GetSpider(bson.ObjectIdHex(id))
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	data, err := services.GetScrapySettings(spider)
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, Response{
+		Status:  "ok",
+		Message: "success",
+		Data:    data,
+	})
+}
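For context, the handler wraps the settings in the same Response envelope as the other spider routes. The sketch below approximates that envelope and its serialized success payload; the JSON tags and the key/value entries are assumptions for illustration, not taken from this diff.

package main

import (
	"encoding/json"
	"fmt"
)

// Approximation of the shared Response envelope used by the routes package;
// the real struct (and its JSON tags) is defined elsewhere in the codebase.
type Response struct {
	Status  string      `json:"status"`
	Message string      `json:"message"`
	Data    interface{} `json:"data"`
}

func main() {
	// Data mirrors the []map[string]interface{} returned by services.GetScrapySettings;
	// the entries here are hypothetical.
	resp := Response{
		Status:  "ok",
		Message: "success",
		Data: []map[string]interface{}{
			{"key": "BOT_NAME", "value": "demo_project"},
			{"key": "ROBOTSTXT_OBEY", "value": true},
		},
	}

	b, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Println(string(b))
}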
@@ -3,7 +3,10 @@ package services
import (
	"bytes"
	"crawlab/model"
	"encoding/json"
	"github.com/apex/log"
	"os/exec"
	"runtime/debug"
	"strings"
)
@@ -16,6 +19,8 @@ func GetScrapySpiderNames(s model.Spider) ([]string, error) {
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		log.Errorf(err.Error())
		debug.PrintStack()
		return []string{}, err
	}
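Both Scrapy helpers follow the same exec pattern: attach bytes.Buffers to cmd.Stdout and cmd.Stderr so the command's output can be parsed afterwards and its error output logged on failure. A standalone sketch of that pattern, with echo standing in for the real command:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

func main() {
	var stdout bytes.Buffer
	var stderr bytes.Buffer

	// Stand-in command; the services code instead runs its CLI inside the spider source dir (cmd.Dir = s.Src).
	cmd := exec.Command("echo", "hello from the spider dir")
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		// On failure, stderr holds whatever the command printed to its error stream.
		fmt.Println("command failed:", err, stderr.String())
		return
	}

	fmt.Print("captured stdout: ", stdout.String())
}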
@@ -30,3 +35,28 @@ func GetScrapySpiderNames(s model.Spider) ([]string, error) {
 	return res, nil
 }
+
+func GetScrapySettings(s model.Spider) (res []map[string]interface{}, err error) {
+	var stdout bytes.Buffer
+	var stderr bytes.Buffer
+
+	cmd := exec.Command("crawlab", "settings")
+	cmd.Dir = s.Src
+	cmd.Stdout = &stdout
+	cmd.Stderr = &stderr
+	if err := cmd.Run(); err != nil {
+		log.Errorf(err.Error())
+		log.Errorf(stderr.String())
+		debug.PrintStack()
+		return res, err
+	}
+
+	log.Infof(stdout.String())
+	if err := json.Unmarshal([]byte(stdout.String()), &res); err != nil {
+		log.Errorf(err.Error())
+		debug.PrintStack()
+		return res, err
+	}
+
+	return res, nil
+}
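GetScrapySettings expects the settings command to print a JSON array on stdout and decodes it into []map[string]interface{}. A minimal sketch of that decoding step, using a made-up payload in place of the real command output (the actual schema is defined by the CLI, not by this diff):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hypothetical stdout of the settings command.
	stdout := `[{"key":"BOT_NAME","value":"demo_project"},{"key":"DOWNLOAD_DELAY","value":0.5}]`

	var res []map[string]interface{}
	if err := json.Unmarshal([]byte(stdout), &res); err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}

	for _, item := range res {
		fmt.Printf("%v = %v\n", item["key"], item["value"])
	}
}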
@@ -191,6 +191,7 @@ export default {
   'Parameter Type': '参数类别',
   'Other': '其他',
   'Scrapy Config': 'Scrapy 配置',
+  'Scrapy Settings': 'Scrapy 设置',
   'Variable Name': '变量名',
   'Variable Type': '变量类型',
   'Variable Value': '变量值',
@@ -22,7 +22,7 @@
       <el-tab-pane :label="$t('Overview')" name="overview">
         <spider-overview/>
       </el-tab-pane>
-      <el-tab-pane v-if="isScrapy" :label="$t('Scrapy Config')" name="scrapy-config">
+      <el-tab-pane v-if="isScrapy" :label="$t('Scrapy Settings')" name="scrapy-config">
         <spider-scrapy/>
       </el-tab-pane>
       <el-tab-pane v-if="isConfigurable" :label="$t('Config')" name="config">