diff --git a/backend/main.go b/backend/main.go
index 19c111d7..758525aa 100644
--- a/backend/main.go
+++ b/backend/main.go
@@ -154,26 +154,27 @@ func main() {
}
// 爬虫
{
- authGroup.GET("/spiders", routes.GetSpiderList) // 爬虫列表
- authGroup.GET("/spiders/:id", routes.GetSpider) // 爬虫详情
- authGroup.PUT("/spiders", routes.PutSpider) // 添加爬虫
- authGroup.POST("/spiders", routes.UploadSpider) // 上传爬虫
- authGroup.POST("/spiders/:id", routes.PostSpider) // 修改爬虫
- authGroup.POST("/spiders/:id/publish", routes.PublishSpider) // 发布爬虫
- authGroup.POST("/spiders/:id/upload", routes.UploadSpiderFromId) // 上传爬虫(ID)
- authGroup.DELETE("/spiders/:id", routes.DeleteSpider) // 删除爬虫
- authGroup.GET("/spiders/:id/tasks", routes.GetSpiderTasks) // 爬虫任务列表
- authGroup.GET("/spiders/:id/file/tree", routes.GetSpiderFileTree) // 爬虫文件目录树读取
- authGroup.GET("/spiders/:id/file", routes.GetSpiderFile) // 爬虫文件读取
- authGroup.POST("/spiders/:id/file", routes.PostSpiderFile) // 爬虫文件更改
- authGroup.PUT("/spiders/:id/file", routes.PutSpiderFile) // 爬虫文件创建
- authGroup.PUT("/spiders/:id/dir", routes.PutSpiderDir) // 爬虫目录创建
- authGroup.DELETE("/spiders/:id/file", routes.DeleteSpiderFile) // 爬虫文件删除
- authGroup.POST("/spiders/:id/file/rename", routes.RenameSpiderFile) // 爬虫文件重命名
- authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir) // 爬虫目录
- authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats) // 爬虫统计数据
- authGroup.GET("/spiders/:id/schedules", routes.GetSpiderSchedules) // 爬虫定时任务
- authGroup.GET("/spiders/:id/scrapy/spiders", routes.GetSpiderScrapySpiders) // Scrapy 爬虫名称列表
+ authGroup.GET("/spiders", routes.GetSpiderList) // 爬虫列表
+ authGroup.GET("/spiders/:id", routes.GetSpider) // 爬虫详情
+ authGroup.PUT("/spiders", routes.PutSpider) // 添加爬虫
+ authGroup.POST("/spiders", routes.UploadSpider) // 上传爬虫
+ authGroup.POST("/spiders/:id", routes.PostSpider) // 修改爬虫
+ authGroup.POST("/spiders/:id/publish", routes.PublishSpider) // 发布爬虫
+ authGroup.POST("/spiders/:id/upload", routes.UploadSpiderFromId) // 上传爬虫(ID)
+ authGroup.DELETE("/spiders/:id", routes.DeleteSpider) // 删除爬虫
+ authGroup.GET("/spiders/:id/tasks", routes.GetSpiderTasks) // 爬虫任务列表
+ authGroup.GET("/spiders/:id/file/tree", routes.GetSpiderFileTree) // 爬虫文件目录树读取
+ authGroup.GET("/spiders/:id/file", routes.GetSpiderFile) // 爬虫文件读取
+ authGroup.POST("/spiders/:id/file", routes.PostSpiderFile) // 爬虫文件更改
+ authGroup.PUT("/spiders/:id/file", routes.PutSpiderFile) // 爬虫文件创建
+ authGroup.PUT("/spiders/:id/dir", routes.PutSpiderDir) // 爬虫目录创建
+ authGroup.DELETE("/spiders/:id/file", routes.DeleteSpiderFile) // 爬虫文件删除
+ authGroup.POST("/spiders/:id/file/rename", routes.RenameSpiderFile) // 爬虫文件重命名
+ authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir) // 爬虫目录
+ authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats) // 爬虫统计数据
+ authGroup.GET("/spiders/:id/schedules", routes.GetSpiderSchedules) // 爬虫定时任务
+ authGroup.GET("/spiders/:id/scrapy/spiders", routes.GetSpiderScrapySpiders) // Scrapy 爬虫名称列表
+ authGroup.GET("/spiders/:id/scrapy/settings", routes.GetSpiderScrapySettings) // Scrapy 爬虫设置
}
// 可配置爬虫
{
diff --git a/backend/routes/spider.go b/backend/routes/spider.go
index 18ed5071..473fb480 100644
--- a/backend/routes/spider.go
+++ b/backend/routes/spider.go
@@ -929,3 +929,33 @@ func GetSpiderScrapySpiders(c *gin.Context) {
 		Data:    spiderNames,
 	})
 }
+
+// GetSpiderScrapySettings responds with the Scrapy settings of the spider
+// identified by the :id path parameter, as produced by services.GetScrapySettings.
+func GetSpiderScrapySettings(c *gin.Context) {
+	id := c.Param("id")
+
+	// Reject malformed ids before touching the database.
+	if !bson.IsObjectIdHex(id) {
+		HandleErrorF(http.StatusBadRequest, c, "spider_id is invalid")
+		return
+	}
+
+	spider, err := model.GetSpider(bson.ObjectIdHex(id))
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	data, err := services.GetScrapySettings(spider)
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, Response{
+		Status:  "ok",
+		Message: "success",
+		Data:    data,
+	})
+}
diff --git a/backend/services/scrapy.go b/backend/services/scrapy.go
index 2e984e6d..f6e46820 100644
--- a/backend/services/scrapy.go
+++ b/backend/services/scrapy.go
@@ -3,7 +3,10 @@ package services
import (
"bytes"
"crawlab/model"
+ "encoding/json"
+ "github.com/apex/log"
"os/exec"
+ "runtime/debug"
"strings"
)
@@ -16,6 +19,8 @@ func GetScrapySpiderNames(s model.Spider) ([]string, error) {
cmd.Stdout = &stdout
cmd.Stderr = &stderr
if err := cmd.Run(); err != nil {
+ log.Errorf(err.Error())
+ debug.PrintStack()
return []string{}, err
}
@@ -30,3 +35,32 @@
 
 	return res, nil
 }
+
+// GetScrapySettings returns the Scrapy settings of the given spider by
+// running the "crawlab settings" command in the spider's source directory
+// and decoding the JSON it prints to stdout.
+func GetScrapySettings(s model.Spider) (res []map[string]interface{}, err error) {
+	var stdout bytes.Buffer
+	var stderr bytes.Buffer
+
+	cmd := exec.Command("crawlab", "settings")
+	cmd.Dir = s.Src
+	cmd.Stdout = &stdout
+	cmd.Stderr = &stderr
+	if err := cmd.Run(); err != nil {
+		// Use a constant format string: err/stderr may contain '%' verbs.
+		log.Errorf("%s", err.Error())
+		log.Errorf("%s", stderr.String())
+		debug.PrintStack()
+		return res, err
+	}
+
+	log.Infof("%s", stdout.String())
+	if err := json.Unmarshal(stdout.Bytes(), &res); err != nil {
+		log.Errorf("%s", err.Error())
+		debug.PrintStack()
+		return res, err
+	}
+
+	return res, nil
+}
diff --git a/frontend/src/i18n/zh.js b/frontend/src/i18n/zh.js
index a8497eed..821b24e7 100644
--- a/frontend/src/i18n/zh.js
+++ b/frontend/src/i18n/zh.js
@@ -191,6 +191,7 @@ export default {
'Parameter Type': '参数类别',
'Other': '其他',
'Scrapy Config': 'Scrapy 配置',
+ 'Scrapy Settings': 'Scrapy 设置',
'Variable Name': '变量名',
'Variable Type': '变量类型',
'Variable Value': '变量值',
diff --git a/frontend/src/views/spider/SpiderDetail.vue b/frontend/src/views/spider/SpiderDetail.vue
index 8200fe68..7dcbec09 100644
--- a/frontend/src/views/spider/SpiderDetail.vue
+++ b/frontend/src/views/spider/SpiderDetail.vue
@@ -22,7 +22,7 @@
-
+