diff --git a/Dockerfile b/Dockerfile
index 893cf6fe..0809a0ba 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,7 +14,8 @@ ADD ./frontend /app
WORKDIR /app
# install frontend
-RUN npm install -g yarn && yarn install
+RUN npm config set unsafe-perm true
+RUN npm install -g yarn && yarn install --registry=https://registry.npm.taobao.org
RUN npm run build:prod
@@ -56,4 +57,4 @@ EXPOSE 8080
EXPOSE 8000
# start backend
-CMD ["/bin/sh", "/app/docker_init.sh"]
\ No newline at end of file
+CMD ["/bin/sh", "/app/docker_init.sh"]
diff --git a/backend/model/log.go b/backend/model/log.go
index 77e5094f..abb77ed9 100644
--- a/backend/model/log.go
+++ b/backend/model/log.go
@@ -24,7 +24,7 @@ func GetLocalLog(logPath string) (fileBytes []byte, err error) {
}
defer utils.Close(f)
- const bufLen = 2 * 1024 * 1024
+ const bufLen = 1 * 1024 * 1024
logBuf := make([]byte, bufLen)
off := int64(0)
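
The bufLen constant caps how many bytes of a task log GetLocalLog reads in one pass; halving it to 1 MB reduces the per-request memory footprint. The rest of GetLocalLog is not part of this diff, so the following is only a minimal sketch of the kind of bounded read the constant governs (reading from offset 0, as the shown `off := int64(0)` suggests); the file path and helper name are illustrative.

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// readHead returns at most the first bufLen bytes of the file at path,
// mirroring the bounded read that bufLen caps. Sketch only; the real
// GetLocalLog body is not shown in this diff.
func readHead(path string, bufLen int64) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	buf := make([]byte, bufLen)
	n, err := f.ReadAt(buf, 0)
	if err != nil && err != io.EOF {
		return nil, err
	}
	return buf[:n], nil
}

func main() {
	const bufLen = 1 * 1024 * 1024 // matches the new constant
	b, err := readHead("/var/log/crawlab/example.log", bufLen)
	if err != nil {
		fmt.Println("read log error:", err)
		return
	}
	fmt.Printf("read %d bytes\n", len(b))
}
```
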
diff --git a/backend/model/node.go b/backend/model/node.go
index 23dc3f8a..1c63fc3e 100644
--- a/backend/model/node.go
+++ b/backend/model/node.go
@@ -4,6 +4,7 @@ import (
"crawlab/constants"
"crawlab/database"
"crawlab/services/register"
+ "errors"
"github.com/apex/log"
"github.com/globalsign/mgo"
"github.com/globalsign/mgo/bson"
@@ -156,15 +157,20 @@ func GetNodeList(filter interface{}) ([]Node, error) {
}
func GetNode(id bson.ObjectId) (Node, error) {
+ var node Node
+
+ if id.Hex() == "" {
+ log.Infof("id is empty")
+ debug.PrintStack()
+ return node, errors.New("id is empty")
+ }
+
s, c := database.GetCol("nodes")
defer s.Close()
- var node Node
if err := c.FindId(id).One(&node); err != nil {
- if err != mgo.ErrNotFound {
- log.Errorf(err.Error())
- debug.PrintStack()
- }
+ log.Errorf(err.Error())
+ debug.PrintStack()
return node, err
}
return node, nil
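
GetNode now fails fast on the zero ObjectId instead of issuing an empty Mongo lookup, and it no longer swallows not-found errors. Callers that build the id from a request string (for example DeleteNode further down) still need their own guard, because bson.ObjectIdHex panics on malformed input. A minimal caller-side sketch, assuming the globalsign/mgo bson package already used in this repo; parseObjectId is a hypothetical helper, not part of this PR:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/globalsign/mgo/bson"
)

// parseObjectId converts a request parameter into a bson.ObjectId,
// returning an error instead of panicking on bad input.
func parseObjectId(s string) (bson.ObjectId, error) {
	if !bson.IsObjectIdHex(s) {
		return "", errors.New("invalid object id: " + s)
	}
	return bson.ObjectIdHex(s), nil
}

func main() {
	if _, err := parseObjectId("not-a-hex-id"); err != nil {
		fmt.Println(err) // invalid object id: not-a-hex-id
	}
}
```
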
diff --git a/backend/model/schedule.go b/backend/model/schedule.go
index 6415e22b..36799ac3 100644
--- a/backend/model/schedule.go
+++ b/backend/model/schedule.go
@@ -16,6 +16,7 @@ type Schedule struct {
Description string `json:"description" bson:"description"`
SpiderId bson.ObjectId `json:"spider_id" bson:"spider_id"`
NodeId bson.ObjectId `json:"node_id" bson:"node_id"`
+ NodeKey string `json:"node_key" bson:"node_key"`
Cron string `json:"cron" bson:"cron"`
EntryId cron.EntryID `json:"entry_id" bson:"entry_id"`
Param string `json:"param" bson:"param"`
@@ -38,6 +39,33 @@ func (sch *Schedule) Save() error {
return nil
}
+func (sch *Schedule) Delete() error {
+ s, c := database.GetCol("schedules")
+ defer s.Close()
+ return c.RemoveId(sch.Id)
+}
+
+func (sch *Schedule) SyncNodeIdAndSpiderId(node Node, spider Spider) {
+ sch.syncNodeId(node)
+ sch.syncSpiderId(spider)
+}
+
+func (sch *Schedule) syncNodeId(node Node) {
+ if node.Id.Hex() == sch.NodeId.Hex() {
+ return
+ }
+ sch.NodeId = node.Id
+ _ = sch.Save()
+}
+
+func (sch *Schedule) syncSpiderId(spider Spider) {
+ if spider.Id.Hex() == sch.SpiderId.Hex() {
+ return
+ }
+ sch.SpiderId = spider.Id
+ _ = sch.Save()
+}
+
func GetScheduleList(filter interface{}) ([]Schedule, error) {
s, c := database.GetCol("schedules")
defer s.Close()
@@ -47,11 +75,12 @@ func GetScheduleList(filter interface{}) ([]Schedule, error) {
return schedules, err
}
- for i, schedule := range schedules {
+ var schs []Schedule
+ for _, schedule := range schedules {
// get the node name
if schedule.NodeId == bson.ObjectIdHex(constants.ObjectIdNull) {
// all nodes selected
- schedules[i].NodeName = "All Nodes"
+ schedule.NodeName = "All Nodes"
} else {
// a single node selected
node, err := GetNode(schedule.NodeId)
@@ -59,7 +88,7 @@ func GetScheduleList(filter interface{}) ([]Schedule, error) {
log.Errorf(err.Error())
continue
}
- schedules[i].NodeName = node.Name
+ schedule.NodeName = node.Name
}
// get the spider name
@@ -67,11 +96,13 @@ func GetScheduleList(filter interface{}) ([]Schedule, error) {
if err != nil {
log.Errorf("get spider by id: %s, error: %s", schedule.SpiderId.Hex(), err.Error())
debug.PrintStack()
+ _ = schedule.Delete()
continue
}
- schedules[i].SpiderName = spider.Name
+ schedule.SpiderName = spider.Name
+ schs = append(schs, schedule)
}
- return schedules, nil
+ return schs, nil
}
func GetSchedule(id bson.ObjectId) (Schedule, error) {
@@ -93,7 +124,12 @@ func UpdateSchedule(id bson.ObjectId, item Schedule) error {
if err := c.FindId(id).One(&result); err != nil {
return err
}
+ node, err := GetNode(item.NodeId)
+ if err != nil {
+ return err
+ }
+ item.NodeKey = node.Key
if err := item.Save(); err != nil {
return err
}
@@ -104,9 +140,15 @@ func AddSchedule(item Schedule) error {
s, c := database.GetCol("schedules")
defer s.Close()
+ node, err := GetNode(item.NodeId)
+ if err != nil {
+ return err
+ }
+
item.Id = bson.NewObjectId()
item.CreateTs = time.Now()
item.UpdateTs = time.Now()
+ item.NodeKey = node.Key
if err := c.Insert(&item); err != nil {
debug.PrintStack()
diff --git a/backend/model/spider.go b/backend/model/spider.go
index df1bf3e5..5c2c92e8 100644
--- a/backend/model/spider.go
+++ b/backend/model/spider.go
@@ -98,6 +98,12 @@ func (spider *Spider) GetLastTask() (Task, error) {
return tasks[0], nil
}
+func (spider *Spider) Delete() error {
+ s, c := database.GetCol("spiders")
+ defer s.Close()
+ return c.RemoveId(spider.Id)
+}
+
// spider list
func GetSpiderList(filter interface{}, skip int, limit int) ([]Spider, int, error) {
s, c := database.GetCol("spiders")
diff --git a/backend/model/task.go b/backend/model/task.go
index f568b7fe..df046ecc 100644
--- a/backend/model/task.go
+++ b/backend/model/task.go
@@ -4,7 +4,6 @@ import (
"crawlab/constants"
"crawlab/database"
"github.com/apex/log"
- "github.com/globalsign/mgo"
"github.com/globalsign/mgo/bson"
"runtime/debug"
"time"
@@ -118,20 +117,16 @@ func GetTaskList(filter interface{}, skip int, limit int, sortKey string) ([]Tas
for i, task := range tasks {
// get the spider name
spider, err := task.GetSpider()
- if err == mgo.ErrNotFound {
- // do nothing
- } else if err != nil {
- return tasks, err
+ if spider.Id.Hex() == "" || err != nil {
+ _ = spider.Delete()
} else {
tasks[i].SpiderName = spider.DisplayName
}
// get the node name
node, err := task.GetNode()
- if err == mgo.ErrNotFound {
- // do nothing
- } else if err != nil {
- return tasks, err
+ if node.Id.Hex() == "" || err != nil {
+ _ = task.Delete()
} else {
tasks[i].NodeName = node.Name
}
diff --git a/backend/routes/node.go b/backend/routes/node.go
index f86c152d..7d030773 100644
--- a/backend/routes/node.go
+++ b/backend/routes/node.go
@@ -15,9 +15,9 @@ func GetNodeList(c *gin.Context) {
return
}
- for i, node := range nodes {
- nodes[i].IsMaster = services.IsMasterNode(node.Id.Hex())
- }
+ //for i, node := range nodes {
+ // nodes[i].IsMaster = services.IsMasterNode(node.Id.Hex())
+ //}
c.JSON(http.StatusOK, Response{
Status: "ok",
@@ -109,11 +109,11 @@ func GetSystemInfo(c *gin.Context) {
})
}
-func DeleteNode(c *gin.Context) {
+func DeleteNode(c *gin.Context) {
id := c.Param("id")
node, err := model.GetNode(bson.ObjectIdHex(id))
if err != nil {
- HandleError(http.StatusInternalServerError, c ,err)
+ HandleError(http.StatusInternalServerError, c, err)
return
}
err = node.Delete()
diff --git a/backend/routes/schedule.go b/backend/routes/schedule.go
index b447abb5..73b75323 100644
--- a/backend/routes/schedule.go
+++ b/backend/routes/schedule.go
@@ -1,7 +1,6 @@
package routes
import (
- "crawlab/constants"
"crawlab/model"
"crawlab/services"
"github.com/gin-gonic/gin"
@@ -46,13 +45,14 @@ func PostSchedule(c *gin.Context) {
HandleError(http.StatusBadRequest, c, err)
return
}
- newItem.Id = bson.ObjectIdHex(id)
- // if node_id is empty, set it to the null ObjectId
- if newItem.NodeId == "" {
- newItem.NodeId = bson.ObjectIdHex(constants.ObjectIdNull)
+ // validate the cron expression
+ if err := services.ParserCron(newItem.Cron); err != nil {
+ HandleError(http.StatusOK, c, err)
+ return
}
+ newItem.Id = bson.ObjectIdHex(id)
// update the database
if err := model.UpdateSchedule(bson.ObjectIdHex(id), newItem); err != nil {
HandleError(http.StatusInternalServerError, c, err)
@@ -80,9 +80,10 @@ func PutSchedule(c *gin.Context) {
return
}
- // if node_id is empty, set it to the null ObjectId
- if item.NodeId == "" {
- item.NodeId = bson.ObjectIdHex(constants.ObjectIdNull)
+ // validate the cron expression
+ if err := services.ParserCron(item.Cron); err != nil {
+ HandleError(http.StatusOK, c, err)
+ return
}
// update the database
diff --git a/backend/services/log.go b/backend/services/log.go
index 6766cd9e..5b5cd7ae 100644
--- a/backend/services/log.go
+++ b/backend/services/log.go
@@ -15,6 +15,7 @@ import (
"os"
"path/filepath"
"runtime/debug"
+ "time"
)
// task log channel map
@@ -45,8 +46,14 @@ func GetRemoteLog(task model.Task) (logStr string, err error) {
// create the channel and wait for the log
ch := TaskLogChanMap.ChanBlocked(task.Id)
- // block here waiting for the result
- logStr = <-ch
+ select {
+ case logStr = <-ch:
+ log.Infof("get remote log")
+ case <-time.After(30 * time.Second):
+ logStr = "get remote log timeout"
+ }
return logStr, nil
}
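
GetRemoteLog previously blocked forever when a worker never published its log; the added select gives up after 30 seconds. A self-contained sketch of the same receive-or-timeout pattern, with an artificial producer and a shorter timeout for illustration:

```go
package main

import (
	"fmt"
	"time"
)

// waitWithTimeout blocks until a value arrives on ch or the timeout fires,
// mirroring the select added to GetRemoteLog.
func waitWithTimeout(ch <-chan string, timeout time.Duration) string {
	select {
	case s := <-ch:
		return s
	case <-time.After(timeout):
		return "get remote log timeout"
	}
}

func main() {
	ch := make(chan string, 1)
	go func() {
		time.Sleep(50 * time.Millisecond)
		ch <- "log contents"
	}()
	fmt.Println(waitWithTimeout(ch, time.Second))                      // log contents
	fmt.Println(waitWithTimeout(make(chan string), time.Millisecond))  // get remote log timeout
}
```
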
diff --git a/backend/services/msg_handler/handler.go b/backend/services/msg_handler/handler.go
index 848e0c5d..b8b8e231 100644
--- a/backend/services/msg_handler/handler.go
+++ b/backend/services/msg_handler/handler.go
@@ -3,6 +3,7 @@ package msg_handler
import (
"crawlab/constants"
"crawlab/entity"
+ "github.com/apex/log"
)
type Handler interface {
@@ -10,6 +11,7 @@ type Handler interface {
}
func GetMsgHandler(msg entity.NodeMessage) Handler {
+ log.Infof("received msg , type is : %s", msg.Type)
if msg.Type == constants.MsgTypeGetLog || msg.Type == constants.MsgTypeRemoveLog {
// log-related
return &Log{
diff --git a/backend/services/msg_handler/msg_log.go b/backend/services/msg_handler/msg_log.go
index 3b1416eb..993fad9a 100644
--- a/backend/services/msg_handler/msg_log.go
+++ b/backend/services/msg_handler/msg_log.go
@@ -40,9 +40,12 @@ func (g *Log) get() error {
msgSd.Log = utils.BytesToString(logStr)
}
// publish the message to the master node
- if err := database.Pub(constants.ChannelMasterNode, msgSd); err != nil {
+ if err := utils.Pub(constants.ChannelMasterNode, msgSd); err != nil {
+ log.Errorf("pub log to master node error: %s", err.Error())
+ debug.PrintStack()
return err
}
+ log.Infof("%s", msgSd.Log)
return nil
}
diff --git a/backend/services/node.go b/backend/services/node.go
index de33f6d3..dffe5ac9 100644
--- a/backend/services/node.go
+++ b/backend/services/node.go
@@ -88,6 +88,8 @@ func UpdateNodeStatus() {
handleNodeInfo(key, data)
}
+ // re-fetch the key list
+ list, _ = database.RedisClient.HKeys("nodes")
// reset nodes whose keys are no longer in redis to offline
model.ResetNodeStatusToOffline(list)
}
@@ -110,13 +112,15 @@ func handleNodeInfo(key string, data Data) {
if err := c.Find(bson.M{"key": key}).One(&node); err != nil {
// the node does not exist in the database
node = model.Node{
- Key: key,
- Name: data.Ip,
- Ip: data.Ip,
- Port: "8000",
- Mac: data.Mac,
- Status: constants.StatusOnline,
- IsMaster: data.Master,
+ Key: key,
+ Name: data.Ip,
+ Ip: data.Ip,
+ Port: "8000",
+ Mac: data.Mac,
+ Status: constants.StatusOnline,
+ IsMaster: data.Master,
+ UpdateTs: time.Now(),
+ UpdateTsUnix: time.Now().Unix(),
}
if err := node.Add(); err != nil {
log.Errorf(err.Error())
@@ -125,6 +129,8 @@ func handleNodeInfo(key string, data Data) {
} else {
// the node already exists in the database
node.Status = constants.StatusOnline
+ node.UpdateTs = time.Now()
+ node.UpdateTsUnix = time.Now().Unix()
if err := node.Save(); err != nil {
log.Errorf(err.Error())
return
@@ -205,6 +211,8 @@ func WorkerNodeCallback(message redis.Message) (err error) {
// deserialize
msg := utils.GetMessage(message)
if err := msg_handler.GetMsgHandler(*msg).Handle(); err != nil {
+ log.Errorf("msg handler error: %s", err.Error())
+ debug.PrintStack()
return err
}
return nil
diff --git a/backend/services/schedule.go b/backend/services/schedule.go
index 58cdf628..d4c1635b 100644
--- a/backend/services/schedule.go
+++ b/backend/services/schedule.go
@@ -5,7 +5,7 @@ import (
"crawlab/lib/cron"
"crawlab/model"
"github.com/apex/log"
- uuid "github.com/satori/go.uuid"
+ "github.com/satori/go.uuid"
"runtime/debug"
)
@@ -17,7 +17,22 @@ type Scheduler struct {
func AddTask(s model.Schedule) func() {
return func() {
- nodeId := s.NodeId
+ node, err := model.GetNodeByKey(s.NodeKey)
+ if err != nil || node.Id.Hex() == "" {
+ log.Errorf("get node by key error, key: %s", s.NodeKey)
+ debug.PrintStack()
+ return
+ }
+
+ spider := model.GetSpiderByName(s.SpiderName)
+ if spider == nil || spider.Id.Hex() == "" {
+ log.Errorf("get spider by name error, name: %s", s.SpiderName)
+ debug.PrintStack()
+ return
+ }
+
+ // sync the resolved ids back to the schedule
+ s.SyncNodeIdAndSpiderId(node, *spider)
// generate the task id
id := uuid.NewV4()
@@ -25,8 +40,8 @@ func AddTask(s model.Schedule) func() {
// build the task model
t := model.Task{
Id: id.String(),
- SpiderId: s.SpiderId,
- NodeId: nodeId,
+ SpiderId: spider.Id,
+ NodeId: node.Id,
Status: constants.StatusPending,
Param: s.Param,
}
@@ -107,6 +122,18 @@ func (s *Scheduler) RemoveAll() {
}
}
+// validate that the cron expression is well-formed
+func ParserCron(spec string) error {
+ parser := cron.NewParser(
+ cron.Second | cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor,
+ )
+
+ if _, err := parser.Parse(spec); err != nil {
+ return err
+ }
+ return nil
+}
+
func (s *Scheduler) Update() error {
// remove all scheduled tasks
s.RemoveAll()
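
ParserCron lets the schedule routes reject a malformed cron expression before it is saved. The sketch below performs the equivalent check against the upstream github.com/robfig/cron/v3 parser; assuming the vendored crawlab/lib/cron exposes the same parser options, the behavior should match, but that equivalence is an assumption, not something shown in this diff.

```go
package main

import (
	"fmt"

	"github.com/robfig/cron/v3"
)

// validateCron reports whether spec is a valid 6-field cron expression
// (seconds included), using the same options ParserCron passes to the parser.
func validateCron(spec string) error {
	parser := cron.NewParser(
		cron.Second | cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor,
	)
	_, err := parser.Parse(spec)
	return err
}

func main() {
	fmt.Println(validateCron("0 */5 * * * *")) // <nil>
	fmt.Println(validateCron("every day"))     // parse error
}
```
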
diff --git a/backend/services/task.go b/backend/services/task.go
index 80f063ff..9e584e82 100644
--- a/backend/services/task.go
+++ b/backend/services/task.go
@@ -100,91 +100,85 @@ func AssignTask(task model.Task) error {
return nil
}
-// execute shell command
-func ExecuteShellCmd(cmdStr string, cwd string, t model.Task, s model.Spider) (err error) {
- log.Infof("cwd: " + cwd)
- log.Infof("cmd: " + cmdStr)
+// set environment variables
+func SetEnv(cmd *exec.Cmd, envs []model.Env, taskId string, dataCol string) *exec.Cmd {
+ // default environment variables
+ cmd.Env = append(os.Environ(), "CRAWLAB_TASK_ID="+taskId)
+ cmd.Env = append(cmd.Env, "CRAWLAB_COLLECTION="+dataCol)
+ cmd.Env = append(cmd.Env, "PYTHONUNBUFFERED=0")
+ cmd.Env = append(cmd.Env, "PYTHONIOENCODING=utf-8")
+ cmd.Env = append(cmd.Env, "TZ=Asia/Shanghai")
- // build the command
- var cmd *exec.Cmd
- if runtime.GOOS == constants.Windows {
- cmd = exec.Command("cmd", "/C", cmdStr)
- } else {
- cmd = exec.Command("sh", "-c", cmdStr)
- }
-
- // working directory
- cmd.Dir = cwd
-
- // point stdout and stderr at the log file
- fLog, err := os.Create(t.LogPath)
- if err != nil {
- HandleTaskError(t, err)
- return err
- }
- defer utils.Close(fLog)
- cmd.Stdout = fLog
- cmd.Stderr = fLog
-
- // add default environment variables
- cmd.Env = append(cmd.Env, "CRAWLAB_TASK_ID="+t.Id)
- cmd.Env = append(cmd.Env, "CRAWLAB_COLLECTION="+s.Col)
-
- // add task environment variables
- for _, env := range s.Envs {
+ // task environment variables
+ for _, env := range envs {
cmd.Env = append(cmd.Env, env.Name+"="+env.Value)
}
- // start a goroutine to monitor the process
- ch := utils.TaskExecChanMap.ChanBlocked(t.Id)
- go func() {
- // block here until a signal is received
- signal := <-ch
- log.Infof("cancel process signal: %s", signal)
- if signal == constants.TaskCancel && cmd.Process != nil {
- // cancel the process
- if err := syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL); err != nil {
- log.Errorf("process kill error: %s", err.Error())
- debug.PrintStack()
- }
- t.Status = constants.StatusCancelled
- } else {
- // save the task
- t.Status = constants.StatusFinished
- }
- t.FinishTs = time.Now()
- t.Error = "user kill the process ..."
- if err := t.Save(); err != nil {
- log.Infof("save task error: %s", err.Error())
+ // TODO: global environment variables
+ return cmd
+}
+
+func SetLogConfig(cmd *exec.Cmd, path string) error {
+ fLog, err := os.Create(path)
+ if err != nil {
+ log.Errorf("create task log file error: %s", path)
+ debug.PrintStack()
+ return err
+ }
+ cmd.Stdout = fLog
+ cmd.Stderr = fLog
+ return nil
+}
+
+func FinishOrCancelTask(ch chan string, cmd *exec.Cmd, t model.Task) {
+ // block here until a signal is received
+ signal := <-ch
+ log.Infof("process received signal: %s", signal)
+
+ if signal == constants.TaskCancel && cmd.Process != nil {
+ // cancel the process
+ if err := syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL); err != nil {
+ log.Errorf("process kill error: %s", err.Error())
debug.PrintStack()
- return
+
+ t.Error = "kill process error: " + err.Error()
+ t.Status = constants.StatusError
+ } else {
+ t.Error = "user kill the process ..."
+ t.Status = constants.StatusCancelled
}
- }()
+ } else {
+ // save the task
+ t.Status = constants.StatusFinished
+ }
- // when "all nodes" is selected, the task actually runs on a random node,
- cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
+ t.FinishTs = time.Now()
+ _ = t.Save()
+}
- // start the process asynchronously
+func StartTaskProcess(cmd *exec.Cmd, t model.Task) error {
if err := cmd.Start(); err != nil {
log.Errorf("start spider error:{}", err.Error())
debug.PrintStack()
- return err
- }
- // save the pid to the task
- t.Pid = cmd.Process.Pid
- if err := t.Save(); err != nil {
- log.Errorf("save task pid error: %s", err.Error())
- debug.PrintStack()
+ t.Error = "start task error: " + err.Error()
+ t.Status = constants.StatusError
+ t.FinishTs = time.Now()
+ _ = t.Save()
return err
}
- // wait synchronously for the process to finish
+ return nil
+}
+
+func WaitTaskProcess(cmd *exec.Cmd, t model.Task) error {
if err := cmd.Wait(); err != nil {
log.Errorf("wait process finish error: %s", err.Error())
debug.PrintStack()
+
if exitError, ok := err.(*exec.ExitError); ok {
exitCode := exitError.ExitCode()
log.Errorf("exit error, exit code: %d", exitCode)
+
// error types other than kill
if exitCode != -1 {
// not a manual kill, so save as error status
@@ -194,6 +188,52 @@ func ExecuteShellCmd(cmdStr string, cwd string, t model.Task, s model.Spider) (e
_ = t.Save()
}
}
+
+ return err
+ }
+ return nil
+}
+
+// execute shell command
+func ExecuteShellCmd(cmdStr string, cwd string, t model.Task, s model.Spider) (err error) {
+ log.Infof("cwd: %s", cwd)
+ log.Infof("cmd: %s", cmdStr)
+
+ // build the command
+ var cmd *exec.Cmd
+ if runtime.GOOS == constants.Windows {
+ cmd = exec.Command("cmd", "/C", cmdStr)
+ } else {
+ cmd = exec.Command("")
+ cmd = exec.Command("sh", "-c", cmdStr)
+ }
+
+ // working directory
+ cmd.Dir = cwd
+
+ // log configuration
+ if err := SetLogConfig(cmd, t.LogPath); err != nil {
+ return err
+ }
+
+ // environment variable configuration
+ cmd = SetEnv(cmd, s.Envs, t.Id, s.Col)
+
+ // start a goroutine to monitor the process
+ ch := utils.TaskExecChanMap.ChanBlocked(t.Id)
+
+ go FinishOrCancelTask(ch, cmd, t)
+
+ // set a process group so killing the task kills all child processes
+ cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
+
+ // start the process
+ if err := StartTaskProcess(cmd, t); err != nil {
+ return err
+ }
+
+ // wait synchronously for the process to finish
+ if err := WaitTaskProcess(cmd, t); err != nil {
return err
}
ch <- constants.TaskFinish
@@ -208,6 +248,7 @@ func MakeLogDir(t model.Task) (fileDir string, err error) {
// create the log directory if it does not exist
if !utils.Exists(fileDir) {
if err := os.MkdirAll(fileDir, 0777); err != nil {
+ log.Errorf("execute task, make log dir error: %s", err.Error())
debug.PrintStack()
return "", err
}
@@ -272,82 +313,55 @@ func ExecuteTask(id int) {
// get the current node
node, err := model.GetCurrentNode()
if err != nil {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
+ log.Errorf("execute task get current node error: %s", err.Error())
+ debug.PrintStack()
return
}
- // public queue
- queuePub := "tasks:public"
-
// node queue
queueCur := "tasks:node:" + node.Id.Hex()
-
// task from the node queue
var msg string
- msg, err = database.RedisClient.LPop(queueCur)
- if err != nil {
- if msg == "" {
- // 节点队列没有任务,获取公共队列任务
- msg, err = database.RedisClient.LPop(queuePub)
- if err != nil {
- if msg == "" {
- // 公共队列没有任务
- log.Debugf(GetWorkerPrefix(id) + "没有任务...")
- return
- } else {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
- debug.PrintStack()
- return
- }
- }
- } else {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
- debug.PrintStack()
- return
+ if msg, err = database.RedisClient.LPop(queueCur); err != nil {
+ // no task in the node queue, fetch one from the public queue
+ queuePub := "tasks:public"
+ msg, _ = database.RedisClient.LPop(queuePub)
+ }
+ if msg == "" {
+ return
+ }
+
// deserialize
tMsg := TaskMessage{}
if err := json.Unmarshal([]byte(msg), &tMsg); err != nil {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
- debug.PrintStack()
+ log.Errorf("json string to struct error: %s", err.Error())
return
}
// get the task
t, err := model.GetTask(tMsg.Id)
if err != nil {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
+ log.Errorf("execute task, get task error: %s", err.Error())
return
}
// get the spider
spider, err := t.GetSpider()
if err != nil {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
+ log.Errorf("execute task, get spider error: %s", err.Error())
return
}
// create the log directory
- fileDir, err := MakeLogDir(t)
- if err != nil {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
+ var fileDir string
+ if fileDir, err = MakeLogDir(t); err != nil {
return
}
-
// get the log file path
t.LogPath = GetLogFilePaths(fileDir)
- // create the log directory folder
- fileStdoutDir := filepath.Dir(t.LogPath)
- if !utils.Exists(fileStdoutDir) {
- if err := os.MkdirAll(fileStdoutDir, os.ModePerm); err != nil {
- log.Errorf(GetWorkerPrefix(id) + err.Error())
- return
- }
- }
-
// working directory
cwd := filepath.Join(
viper.GetString("spider.path"),
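
FinishOrCancelTask can only tear down the spider's child processes because ExecuteShellCmd starts the command in its own process group (Setpgid: true) and then signals the negative pid. A minimal, Unix-only sketch of that pattern in isolation; the sleep command is just a stand-in for a spider process.

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	// sh spawns a child sleep; both end up in the same new process group
	cmd := exec.Command("sh", "-c", "sleep 60")
	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}

	if err := cmd.Start(); err != nil {
		fmt.Println("start error:", err)
		return
	}

	time.Sleep(100 * time.Millisecond)

	// a negative pid targets the whole process group, as in FinishOrCancelTask
	if err := syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL); err != nil {
		fmt.Println("kill error:", err)
	}

	err := cmd.Wait()
	fmt.Println("wait returned:", err) // signal: killed
}
```
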
diff --git a/frontend/src/components/Common/CrawlConfirmDialog.vue b/frontend/src/components/Common/CrawlConfirmDialog.vue
index 266ef2eb..2286beb2 100644
--- a/frontend/src/components/Common/CrawlConfirmDialog.vue
+++ b/frontend/src/components/Common/CrawlConfirmDialog.vue
@@ -9,9 +9,8 @@
-
diff --git a/frontend/src/views/schedule/ScheduleList.vue b/frontend/src/views/schedule/ScheduleList.vue
index c44d46e2..b170c9ed 100644
--- a/frontend/src/views/schedule/ScheduleList.vue
+++ b/frontend/src/views/schedule/ScheduleList.vue
@@ -14,9 +14,9 @@
-
+
-
+
-
-
-
-
- {{$t('schedules.cron')}}
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
- {{$t('schedules.add_cron')}}
+
+