Add spider file management backend API

marvzhang
2019-12-24 09:11:42 +08:00
parent 7eaad2cb7c
commit 936adf6f76
2 changed files with 128 additions and 17 deletions

View File

@@ -129,18 +129,21 @@ func main() {
authGroup.GET("/nodes/:id/system", routes.GetSystemInfo) // 节点任务列表
authGroup.DELETE("/nodes/:id", routes.DeleteNode) // 删除节点
// 爬虫
authGroup.GET("/spiders", routes.GetSpiderList) // 爬虫列表
authGroup.GET("/spiders/:id", routes.GetSpider) // 爬虫详情
authGroup.POST("/spiders", routes.PutSpider) // 上传爬虫 TODO: 名称不对
authGroup.POST("/spiders/:id", routes.PostSpider) // 修改爬虫
authGroup.POST("/spiders/:id/publish", routes.PublishSpider) // 发布爬虫
authGroup.DELETE("/spiders/:id", routes.DeleteSpider) // 删除爬虫
authGroup.GET("/spiders/:id/tasks", routes.GetSpiderTasks) // 爬虫任务列表
authGroup.GET("/spiders/:id/file", routes.GetSpiderFile) // 爬虫文件读取
authGroup.POST("/spiders/:id/file", routes.PostSpiderFile) // 爬虫目录写入
authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir) // 爬虫目录
authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats) // 爬虫统计数据
authGroup.GET("/spider/types", routes.GetSpiderTypes) // 爬虫类型
authGroup.GET("/spiders", routes.GetSpiderList) // 爬虫列表
authGroup.GET("/spiders/:id", routes.GetSpider) // 爬虫详情
authGroup.POST("/spiders", routes.PutSpider) // 上传爬虫 TODO: 名称不对
authGroup.POST("/spiders/:id", routes.PostSpider) // 修改爬虫
authGroup.POST("/spiders/:id/publish", routes.PublishSpider) // 发布爬虫
authGroup.DELETE("/spiders/:id", routes.DeleteSpider) // 删除爬虫
authGroup.GET("/spiders/:id/tasks", routes.GetSpiderTasks) // 爬虫任务列表
authGroup.GET("/spiders/:id/file", routes.GetSpiderFile) // 爬虫文件读取
authGroup.POST("/spiders/:id/file", routes.PostSpiderFile) // 爬虫文件更改
authGroup.PUT("/spiders/:id/file", routes.PutSpiderFile) // 爬虫文件创建
authGroup.DELETE("/spiders/:id/file", routes.DeleteSpiderFile) // 爬虫文件删除
authGroup.DELETE("/spiders/:id/file", routes.RenameSpiderFile) // 爬虫文件重命名
authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir) // 爬虫目录
authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats) // 爬虫统计数据
authGroup.GET("/spider/types", routes.GetSpiderTypes) // 爬虫类型
// Configurable spiders
authGroup.GET("/config_spiders/:id/config", routes.GetConfigSpiderConfig) // Get configurable spider config
authGroup.POST("/config_spiders/:id/config", routes.PostConfigSpiderConfig) // Update configurable spider config

View File

@@ -7,6 +7,7 @@ import (
"crawlab/model"
"crawlab/services"
"crawlab/utils"
"fmt"
"github.com/apex/log"
"github.com/gin-gonic/gin"
"github.com/globalsign/mgo"
@@ -17,6 +18,7 @@ import (
"io/ioutil"
"net/http"
"os"
"path"
"path/filepath"
"runtime/debug"
"strconv"
@@ -283,6 +285,14 @@ func GetSpiderDir(c *gin.Context) {
})
}
// Spider file management
type SpiderFileReqBody struct {
Path string `json:"path"`
Content string `json:"content"`
NewPath string `json:"new_path"`
}
func GetSpiderFile(c *gin.Context) {
// Spider ID
id := c.Param("id")
@@ -311,11 +321,6 @@ func GetSpiderFile(c *gin.Context) {
})
}
type SpiderFileReqBody struct {
Path string `json:"path"`
Content string `json:"content"`
}
func PostSpiderFile(c *gin.Context) {
// Spider ID
id := c.Param("id")
@@ -347,6 +352,109 @@ func PostSpiderFile(c *gin.Context) {
})
}
func PutSpiderFile(c *gin.Context) {
spiderId := c.Param("id")
var reqBody SpiderFileReqBody
if err := c.ShouldBindJSON(&reqBody); err != nil {
HandleError(http.StatusBadRequest, c, err)
return
}
spider, err := model.GetSpider(bson.ObjectIdHex(spiderId))
if err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
// File path
filePath := path.Join(spider.Src, reqBody.Path)
// If the file already exists, return an error
if utils.Exists(filePath) {
HandleErrorF(http.StatusInternalServerError, c, fmt.Sprintf(`%s already exists`, filePath))
return
}
// Write the file
if err := ioutil.WriteFile(filePath, []byte(reqBody.Content), 0777); err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
c.JSON(http.StatusOK, Response{
Status: "ok",
Message: "success",
})
}
func DeleteSpiderFile(c *gin.Context) {
spiderId := c.Param("id")
var reqBody SpiderFileReqBody
if err := c.ShouldBindJSON(&reqBody); err != nil {
HandleError(http.StatusBadRequest, c, err)
return
}
spider, err := model.GetSpider(bson.ObjectIdHex(spiderId))
if err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
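// File path under the spider source directory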
filePath := path.Join(spider.Src, reqBody.Path)
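// Remove the file or directory from disk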
if err := os.RemoveAll(filePath); err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
c.JSON(http.StatusOK, Response{
Status: "ok",
Message: "success",
})
}
func RenameSpiderFile(c *gin.Context) {
spiderId := c.Param("id")
var reqBody SpiderFileReqBody
if err := c.ShouldBindJSON(&reqBody); err != nil {
HandleError(http.StatusBadRequest, c, err)
return
}
spider, err := model.GetSpider(bson.ObjectIdHex(spiderId))
if err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
// Original file path
filePath := path.Join(spider.Src, reqBody.Path)
newFilePath := path.Join(spider.Src, reqBody.NewPath)
// If the new file already exists, return an error
if utils.Exists(newFilePath) {
HandleErrorF(http.StatusInternalServerError, c, fmt.Sprintf(`%s already exists`, newFilePath))
return
}
// Read the original file
content, err := ioutil.ReadFile(filePath)
if err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
// Write the new file
if err := ioutil.WriteFile(newFilePath, content, 0777); err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
// Delete the original file
if err := os.RemoveAll(filePath); err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
c.JSON(http.StatusOK, Response{
Status: "ok",
Message: "success",
})
}
// Spider types
func GetSpiderTypes(c *gin.Context) {
types, err := model.GetSpiderTypes()