修复爬虫被覆盖的问题 (Fix issue where spider files were being overwritten)

This commit is contained in:
marvzhang
2019-12-04 14:54:49 +08:00
parent be4a5f6667
commit 97ca3b74b8
5 changed files with 14 additions and 6 deletions

View File

@@ -254,8 +254,8 @@ func PostConfigSpiderConfig(c *gin.Context) {
return
}
// 根据序列化后的数据处理爬虫文件
if err := services.ProcessSpiderFilesFromConfigData(spider, configData); err != nil {
// 校验configData
if err := services.ValidateSpiderfile(configData); err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
@@ -266,6 +266,12 @@ func PostConfigSpiderConfig(c *gin.Context) {
return
}
// 根据序列化后的数据处理爬虫文件
if err := services.ProcessSpiderFilesFromConfigData(spider, configData); err != nil {
HandleError(http.StatusInternalServerError, c, err)
return
}
c.JSON(http.StatusOK, Response{
Status: "ok",
Message: "success",

View File

@@ -245,7 +245,12 @@ func GenerateSpiderfileFromConfigData(spider model.Spider, configData entity.Con
}
// 打开文件
f, err := os.OpenFile(sfPath, os.O_WRONLY|os.O_TRUNC, 0777)
var f *os.File
if utils.Exists(sfPath) {
f, err = os.OpenFile(sfPath, os.O_WRONLY|os.O_TRUNC, 0777)
} else {
f, err = os.OpenFile(sfPath, os.O_CREATE, 0777)
}
if err != nil {
return err
}

View File

@@ -100,7 +100,6 @@ func (s *SpiderSync) Download() {
// 创建临时文件
tmpFilePath := filepath.Join(tmpPath, randomId.String()+".zip")
tmpFile := utils.OpenFile(tmpFilePath)
defer utils.Close(tmpFile)
// 将该文件写入临时文件
if _, err := io.Copy(tmpFile, f); err != nil {

View File

@@ -167,7 +167,6 @@ func DeCompress(srcFile *os.File, dstPath string) error {
debug.PrintStack()
continue
}
defer Close(newFile)
// 拷贝该文件到新文件中
if _, err := io.Copy(newFile, srcFile); err != nil {

View File

@@ -281,7 +281,6 @@ import {
import dayjs from 'dayjs'
import CrawlConfirmDialog from '../../components/Common/CrawlConfirmDialog'
import StatusTag from '../../components/Status/StatusTag'
import request from '../../api/request'
export default {
name: 'SpiderList',