diff --git a/CHANGELOG-zh.md b/CHANGELOG-zh.md
new file mode 100644
index 00000000..c00c4fc1
--- /dev/null
+++ b/CHANGELOG-zh.md
@@ -0,0 +1,149 @@
+# 0.4.3 (2020-01-07)
+
+### 功能 / 优化
+- **依赖安装**. 允许用户在平台 Web 界面安装/卸载依赖以及添加编程语言(暂时只有 Node.js)。
+- **Docker 中预装编程语言**. 允许 Docker 用户通过设置 `CRAWLAB_SERVER_LANG_NODE` 为 `Y` 来预装 `Node.js` 环境.
+- **在爬虫详情页添加定时任务列表**. 允许用户在爬虫详情页查看、添加、编辑定时任务. [#360](https://github.com/crawlab-team/crawlab/issues/360)
+- **Cron 表达式与 Linux 一致**. 将表达式从 6 元素改为 5 元素,与 Linux 一致.
+- **启用/禁用定时任务**. 允许用户启用/禁用定时任务. [#297](https://github.com/crawlab-team/crawlab/issues/297)
+- **优化任务管理**. 允许用户批量删除任务. [#341](https://github.com/crawlab-team/crawlab/issues/341)
+- **优化爬虫管理**. 允许用户在爬虫列表页对爬虫进行筛选和排序.
+- **添加中文版 `CHANGELOG`**.
+- **在顶部添加 GitHub 加星按钮**.
+
+### Bug 修复
+- **定时任务问题**. [#423](https://github.com/crawlab-team/crawlab/issues/423)
+- **上传爬虫 zip 文件问题**. [#403](https://github.com/crawlab-team/crawlab/issues/403) [#407](https://github.com/crawlab-team/crawlab/issues/407)
+- **因网络原因导致崩溃**. [#340](https://github.com/crawlab-team/crawlab/issues/340)
+
+# 0.4.2 (2019-12-26)
+### 功能 / 优化
+- **免责声明**. 加入免责声明.
+- **通过 API 获取版本号**. [#371](https://github.com/crawlab-team/crawlab/issues/371)
+- **通过配置来允许用户注册**. [#346](https://github.com/crawlab-team/crawlab/issues/346)
+- **允许添加新用户**.
+- **更高级的文件管理**. 允许用户添加、编辑、重命名、删除代码文件. [#286](https://github.com/crawlab-team/crawlab/issues/286)
+- **优化爬虫创建流程**. 允许用户在上传 zip 文件前创建空的自定义爬虫.
+- **优化任务管理**. 允许用户通过选择条件过滤任务. [#341](https://github.com/crawlab-team/crawlab/issues/341)
+
+### Bug 修复
+- **重复节点**. [#391](https://github.com/crawlab-team/crawlab/issues/391)
+- **"mongodb no reachable" 错误**. [#373](https://github.com/crawlab-team/crawlab/issues/373)
+
+# 0.4.1 (2019-12-13)
+### 功能 / 优化
+- **Spiderfile 优化**. 将阶段由数组更换为字典. [#358](https://github.com/crawlab-team/crawlab/issues/358)
+- **百度统计更新**.
+
+### Bug 修复
+- **无法展示定时任务**. [#353](https://github.com/crawlab-team/crawlab/issues/353)
+- **重复节点注册**. [#334](https://github.com/crawlab-team/crawlab/issues/334)
+
+# 0.4.0 (2019-12-06)
+### 功能 / 优化
+- **可配置爬虫**. 允许用户添加 `Spiderfile` 来配置抓取规则.
+- **执行模式**. 允许用户选择 3 种任务执行模式:*所有节点*、*指定节点* 和 *随机*.
+
+### Bug 修复
+- **任务意外被杀死**. [#306](https://github.com/crawlab-team/crawlab/issues/306)
+- **文档更正**. [#258](https://github.com/crawlab-team/crawlab/issues/258)
+- **直接部署与 Windows 不兼容**. [#288](https://github.com/crawlab-team/crawlab/issues/288)
+- **日志文件丢失**. [#269](https://github.com/crawlab-team/crawlab/issues/269)
+
+# 0.3.5 (2019-10-28)
+### 功能 / 优化
+- **优雅关闭**. [详情](https://github.com/crawlab-team/crawlab/commit/63fab3917b5a29fd9770f9f51f1572b9f0420385)
+- **节点信息优化**. [详情](https://github.com/crawlab-team/crawlab/commit/973251a0fbe7a2184ac0da09e0404a17c736aee7)
+- **将系统环境变量添加到任务**. [详情](https://github.com/crawlab-team/crawlab/commit/4ab4892471965d6342d30385578ca60dc51f8ad3)
+- **自动刷新任务日志**. [详情](https://github.com/crawlab-team/crawlab/commit/4ab4892471965d6342d30385578ca60dc51f8ad3)
+- **允许 HTTPS 部署**. [详情](https://github.com/crawlab-team/crawlab/commit/5d8f6f0c56768a6e58f5e46cbf5adff8c7819228)
+
+### Bug 修复
+- **定时任务中无法获取爬虫列表**. [详情](https://github.com/crawlab-team/crawlab/commit/311f72da19094e3fa05ab4af49812f58843d8d93)
+- **无法获取工作节点信息**. [详情](https://github.com/crawlab-team/crawlab/commit/6af06efc17685a9e232e8c2b5fd819ec7d2d1674)
+- **运行爬虫任务时无法选择节点**. [详情](https://github.com/crawlab-team/crawlab/commit/31f8e03234426e97aed9b0bce6a50562f957edad)
+- **结果量很大时无法获取结果数量**. [#260](https://github.com/crawlab-team/crawlab/issues/260)
+- **定时任务中的节点问题**. [#244](https://github.com/crawlab-team/crawlab/issues/244)
+
+# 0.3.1 (2019-08-25)
+### 功能 / 优化
+- **Docker 镜像优化**. 将 Docker 镜像进一步分割成 alpine 镜像版本的 master、worker、frontend.
+- **单元测试**. 用单元测试覆盖部分后端代码.
+- **前端优化**. 登录页、按钮大小、上传 UI 提示.
+- **更灵活的节点注册**. 允许用户传一个变量作为注册 key,而不是默认的 MAC 地址.
+
+### Bug 修复
+- **上传大爬虫文件错误**. 修复上传大爬虫文件时的内存崩溃问题. [#150](https://github.com/crawlab-team/crawlab/issues/150)
+- **无法同步爬虫**. 通过提高写权限等级来修复同步爬虫文件时的问题. [#114](https://github.com/crawlab-team/crawlab/issues/114)
+- **爬虫页问题**. 通过删除 `Site` 字段来修复. [#112](https://github.com/crawlab-team/crawlab/issues/112)
+- **节点展示问题**. 在多台机器上运行 Docker 容器时,节点无法正确展示. [#99](https://github.com/crawlab-team/crawlab/issues/99)
+
+# 0.3.0 (2019-07-31)
+### 功能 / 优化
+- **Golang 后端**: 将后端由 Python 重构为 Golang,大大提高了稳定性和性能.
+- **节点网络图**: 节点拓扑图可视化.
+- **节点系统信息**: 可以查看包括操作系统、CPU 数量、可执行文件在内的系统信息.
+- **节点监控改进**: 节点通过 Redis 来监控和注册.
+- **文件管理**: 可以在线编辑爬虫文件,包括代码高亮.
+- **登录页/注册页/用户管理**: 要求用户登录后才能使用 Crawlab,允许用户注册和管理用户,并提供一些基于角色的鉴权机制.
+- **自动部署爬虫**: 爬虫将被自动部署或同步到所有在线节点.
+- **更小的 Docker 镜像**: 瘦身版 Docker 镜像,通过多阶段构建将 Docker 镜像大小从 1.3G 减小到 700M 左右.
+
+### Bug 修复
+- **节点状态**. 节点状态不会随着节点下线而更新. [#87](https://github.com/tikazyq/crawlab/issues/87)
+- **爬虫部署错误**. 通过自动爬虫部署来修复. [#83](https://github.com/tikazyq/crawlab/issues/83)
+- **节点无法显示在线**. [#81](https://github.com/tikazyq/crawlab/issues/81)
+- **定时任务无法工作**. 通过 Golang 后端修复. [#64](https://github.com/tikazyq/crawlab/issues/64)
+- **Flower 错误**. 通过 Golang 后端修复. [#57](https://github.com/tikazyq/crawlab/issues/57)
+
+# 0.2.4 (2019-07-07)
+### 功能 / 优化
+- **文档**: 更优、更详细的文档.
+- **更好的 Crontab**: 通过 UI 界面生成 Cron 表达式.
+- **更优的性能**: 从原生 Flask 引擎切换到 `gunicorn`. [#78](https://github.com/tikazyq/crawlab/issues/78)
+
+### Bug 修复
+- **删除爬虫**. 删除爬虫时不仅在数据库中删除,还会删除相关的文件夹、任务和定时任务. [#69](https://github.com/tikazyq/crawlab/issues/69)
+- **MongoDB 授权**. 允许用户指定 `authenticationDatabase` 来连接 `mongodb`. [#68](https://github.com/tikazyq/crawlab/issues/68)
+- **Windows 兼容性**. 将 `eventlet` 加入 `requirements.txt`. [#59](https://github.com/tikazyq/crawlab/issues/59)
+
+# 0.2.3 (2019-06-12)
+### 功能 / 优化
+- **Docker**: 用户能够运行 Docker 镜像来加快部署.
+- **CLI**: 允许用户通过命令行来执行 Crawlab 程序.
+- **上传爬虫**: 允许用户上传自定义爬虫到 Crawlab.
+- **预览时编辑字段**: 允许用户在可配置爬虫中预览数据时编辑字段.
+
+### Bug 修复
+- **爬虫分页**. 修复爬虫列表页中的分页问题.
+
+# 0.2.2 (2019-05-30)
+### 功能 / 优化
+- **自动抓取字段**: 在可配置爬虫列表页中自动抓取字段.
+- **下载结果**: 允许将结果下载为 CSV 文件.
+- **百度统计**: 允许用户选择是否向百度统计发送统计数据.
+
+### Bug 修复
+- **结果页分页**. [#45](https://github.com/tikazyq/crawlab/issues/45)
+- **定时任务重复触发**: 将 Flask DEBUG 设置为 False,确保定时任务不会重复触发. [#32](https://github.com/tikazyq/crawlab/issues/32)
+- **前端环境**: 添加 `VUE_APP_BASE_URL` 作为生产环境模式变量,这样 API 地址就不会始终是 `localhost`. [#30](https://github.com/tikazyq/crawlab/issues/30)
+
+# 0.2.1 (2019-05-27)
+- **可配置爬虫**: 允许用户不用编写代码就能创建爬虫来抓取数据.
+
+# 0.2 (2019-05-10)
+
+- **高级数据统计**: 爬虫详情页的高级数据统计.
+- **网站数据**: 加入网站列表(中国),允许用户查看 robots.txt、首页响应时间等信息.
+
+# 0.1.1 (2019-04-23)
+
+- **基础统计**: 用户可以查看基础统计数据,包括爬虫和任务页中的失败任务数、结果数.
+- **近实时任务信息**: 周期性(5 秒)向服务器轮询数据,实现近实时查看任务信息.
+- **定时任务**: 利用 apscheduler 实现定时任务,允许用户设置类似 Cron 的定时任务.
+
+# 0.1 (2019-04-17)
+
+- **首次发布**
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1275b9fa..6c64fbd8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,9 +1,20 @@
-# 0.4.3 (unknown)
+# 0.4.3 (2020-01-07)
 ### Features / Enhancement
 - **Dependency Installation**. Allow users to install/uninstall dependencies and add programming languages (Node.js only for now) on the platform web interface.
 - **Pre-install Programming Languages in Docker**. Allow Docker users to set `CRAWLAB_SERVER_LANG_NODE` to `Y` to pre-install `Node.js` environments.
+- **Add Schedule List in Spider Detail Page**. Allow users to view/add/edit schedule cron jobs in the spider detail page. [#360](https://github.com/crawlab-team/crawlab/issues/360)
+- **Align Cron Expression with Linux**. Changed the cron expression from 6 elements to 5 elements, consistent with Linux.
+- **Enable/Disable Schedule Cron**. Allow users to enable/disable schedule jobs. [#297](https://github.com/crawlab-team/crawlab/issues/297)
+- **Better Task Management**. Allow users to batch delete tasks. [#341](https://github.com/crawlab-team/crawlab/issues/341)
+- **Better Spider Management**. Allow users to sort and filter spiders in the spider list page.
+- **Added Chinese `CHANGELOG`**.
+- **Added GitHub Star Button at Nav Bar**.
+
+### Bug Fixes
+- **Schedule Cron Task Issue**. [#423](https://github.com/crawlab-team/crawlab/issues/423)
+- **Upload Spider Zip File Issue**. [#403](https://github.com/crawlab-team/crawlab/issues/403) [#407](https://github.com/crawlab-team/crawlab/issues/407)
+- **Exit due to Network Failure**. [#340](https://github.com/crawlab-team/crawlab/issues/340)
 
 # 0.4.2 (2019-12-26)
 ### Features / Enhancement
diff --git a/README-zh.md b/README-zh.md
index 5b9acf29..9057fcc3 100644
--- a/README-zh.md
+++ b/README-zh.md
@@ -1,16 +1,16 @@
 # Crawlab
 
 中文 | [English](https://github.com/crawlab-team/crawlab)
 
-[安装](#安装) | [运行](#运行) | [截图](#截图) | [架构](#架构) | [集成](#与其他框架的集成) | [比较](#与其他框架比较) | [相关文章](#相关文章) | [社区&赞助](#社区--赞助) | [免责声明](https://github.com/crawlab-team/crawlab/blob/master/DISCLAIMER-zh.md)
+[安装](#安装) | [运行](#运行) | [截图](#截图) | [架构](#架构) | [集成](#与其他框架的集成) | [比较](#与其他框架比较) | [相关文章](#相关文章) | [社区&赞助](#社区--赞助) | [更新日志](https://github.com/crawlab-team/crawlab/blob/master/CHANGELOG-zh.md) | [免责声明](https://github.com/crawlab-team/crawlab/blob/master/DISCLAIMER-zh.md)
 
 基于Golang的分布式爬虫管理平台,支持Python、NodeJS、Go、Java、PHP等多种编程语言以及多种爬虫框架。
 
@@ -19,9 +19,9 @@
 ## 安装
 
 三种方式:
-1. [Docker](https://tikazyq.github.io/crawlab-docs/Installation/Docker.html)(推荐)
-2. [直接部署](https://tikazyq.github.io/crawlab-docs/Installation/Direct.html)(了解内核)
-3. [Kubernetes](https://mp.weixin.qq.com/s/3Q1BQATUIEE_WXcHPqhYbA)
+1. [Docker](http://docs.crawlab.cn/Installation/Docker.html)(推荐)
+2. [直接部署](http://docs.crawlab.cn/Installation/Direct.html)(了解内核)
+3. [Kubernetes](https://juejin.im/post/5e0a02d851882549884c27ad) (多节点部署)
 
 ### 要求(Docker)
 - Docker 18.03+
@@ -31,9 +31,17 @@
 ### 要求(直接部署)
 - Go 1.12+
 - Node 8.12+
-- Redis
+- Redis 5.x+
 - MongoDB 3.6+
 
+## 快速开始
+
+```bash
+git clone https://github.com/crawlab-team/crawlab
+cd crawlab
+docker-compose up -d
+```
+
 ## 运行
 
 ### Docker
@@ -123,6 +131,10 @@ Docker部署的详情,请见[相关文档](https://tikazyq.github.io/crawlab-d
 
+#### 依赖安装
+
+
 ## 架构
 
 Crawlab的架构包括了一个主节点(Master Node)和多个工作节点(Worker Node),以及负责通信和数据储存的Redis和MongoDB数据库。
diff --git a/README.md b/README.md
index 7b7c3d2d..075a80b5 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,16 @@
 # Crawlab
 
 [中文](https://github.com/crawlab-team/crawlab/blob/master/README-zh.md) | English
 
-[Installation](#installation) | [Run](#run) | [Screenshot](#screenshot) | [Architecture](#architecture) | [Integration](#integration-with-other-frameworks) | [Compare](#comparison-with-other-frameworks) | [Community & Sponsorship](#community--sponsorship) | [Disclaimer](https://github.com/crawlab-team/crawlab/blob/master/DISCLAIMER.md)
+[Installation](#installation) | [Run](#run) | [Screenshot](#screenshot) | [Architecture](#architecture) | [Integration](#integration-with-other-frameworks) | [Compare](#comparison-with-other-frameworks) | [Community & Sponsorship](#community--sponsorship) | [CHANGELOG](https://github.com/crawlab-team/crawlab/blob/master/CHANGELOG.md) | [Disclaimer](https://github.com/crawlab-team/crawlab/blob/master/DISCLAIMER.md)
 
 Golang-based distributed web crawler management platform, supporting various languages including Python, NodeJS, Go, Java, PHP and various web crawler frameworks including Scrapy, Puppeteer, Selenium.
 
@@ -19,9 +19,9 @@
 ## Installation
 
 Three methods:
-1. [Docker](https://tikazyq.github.io/crawlab-docs/Installation/Docker.html) (Recommended)
-2. [Direct Deploy](https://tikazyq.github.io/crawlab-docs/Installation/Direct.html) (Check Internal Kernel)
-3. [Kubernetes](https://mp.weixin.qq.com/s/3Q1BQATUIEE_WXcHPqhYbA)
+1. [Docker](http://docs.crawlab.cn/Installation/Docker.html) (Recommended)
+2. [Direct Deploy](http://docs.crawlab.cn/Installation/Direct.html) (Check Internal Kernel)
+3. [Kubernetes](https://juejin.im/post/5e0a02d851882549884c27ad) (Multi-Node Deployment)
 
 ### Pre-requisite (Docker)
 - Docker 18.03+
 - Redis
 - MongoDB 3.6+
 
 ### Pre-requisite (Direct Deploy)
 - Go 1.12+
 - Node 8.12+
-- Redis
+- Redis 5.x+
 - MongoDB 3.6+
 
+## Quick Start
+
+```bash
+git clone https://github.com/crawlab-team/crawlab
+cd crawlab
+docker-compose up -d
+```
+
 ## Run
 
 ### Docker
@@ -121,6 +129,10 @@ For Docker Deployment details, please refer to [relevant documentation](https://
 
+#### Dependency Installation
+
+
 ## Architecture
 
 The architecture of Crawlab consists of the Master Node and multiple Worker Nodes, and the Redis and MongoDB databases which are mainly for node communication and data storage.
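One note on the README changes above: the 0.4.3 feature lets Docker users pre-install Node.js by setting `CRAWLAB_SERVER_LANG_NODE` to `Y`. A minimal sketch of passing that flag to a standalone master container — the image tag follows this diff's `docker-compose.yml`, while the published port and the external MongoDB/Redis wiring are assumptions:

```bash
# Sketch: master node with Node.js pre-installed (CRAWLAB_SERVER_LANG_NODE=Y,
# per the 0.4.3 changelog). The 8080 port mapping and the assumption that
# MongoDB/Redis are reachable with default settings may need adjusting.
docker run -d --name crawlab-master \
  -e CRAWLAB_SERVER_LANG_NODE=Y \
  -p 8080:8080 \
  tikazyq/crawlab:latest
```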
diff --git a/backend/constants/common.go b/backend/constants/common.go
new file mode 100644
index 00000000..9ac6cdbc
--- /dev/null
+++ b/backend/constants/common.go
@@ -0,0 +1,6 @@
+package constants
+
+const (
+	ASCENDING  = "ascending"
+	DESCENDING = "descending"
+)
diff --git a/backend/constants/schedule.go b/backend/constants/schedule.go
index c3104601..520626a9 100644
--- a/backend/constants/schedule.go
+++ b/backend/constants/schedule.go
@@ -1,7 +1,7 @@
 package constants
 
 const (
-	ScheduleStatusStop    = "stop"
+	ScheduleStatusStop    = "stopped"
 	ScheduleStatusRunning = "running"
 	ScheduleStatusError   = "error"
diff --git a/backend/database/mongo.go b/backend/database/mongo.go
index d646285d..5d205ae4 100644
--- a/backend/database/mongo.go
+++ b/backend/database/mongo.go
@@ -93,5 +93,14 @@ func InitMongo() error {
 		// 赋值给全局mongo session
 		Session = sess
 	}
+	// 为 key 字段添加唯一索引
+	keyIndex := mgo.Index{
+		Key:    []string{"key"},
+		Unique: true,
+	}
+	s, c := GetCol("nodes")
+	defer s.Close()
+	if err := c.EnsureIndex(keyIndex); err != nil {
+		return err
+	}
+
 	return nil
 }
diff --git a/backend/main.go b/backend/main.go
index 7dd5046e..08cdf70f 100644
--- a/backend/main.go
+++ b/backend/main.go
@@ -168,7 +168,7 @@ func main() {
 		authGroup.POST("/spiders/:id/file/rename", routes.RenameSpiderFile) // 爬虫文件重命名
 		authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir)              // 爬虫目录
 		authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats)          // 爬虫统计数据
-		authGroup.GET("/spider/types", routes.GetSpiderTypes)               // 爬虫类型
+		authGroup.GET("/spiders/:id/schedules", routes.GetSpiderSchedules)  // 爬虫定时任务
 		// 可配置爬虫
 		authGroup.GET("/config_spiders/:id/config", routes.GetConfigSpiderConfig)   // 获取可配置爬虫配置
 		authGroup.POST("/config_spiders/:id/config", routes.PostConfigSpiderConfig) // 更改可配置爬虫配置
@@ -189,13 +189,13 @@ func main() {
 		authGroup.GET("/tasks/:id/results", routes.GetTaskResults)                  // 任务结果
 		authGroup.GET("/tasks/:id/results/download", routes.DownloadTaskResultsCsv) // 下载任务结果
 		// 定时任务
-		authGroup.GET("/schedules", routes.GetScheduleList)        // 定时任务列表
-		authGroup.GET("/schedules/:id", routes.GetSchedule)        // 定时任务详情
-		authGroup.PUT("/schedules", routes.PutSchedule)            // 创建定时任务
-		authGroup.POST("/schedules/:id", routes.PostSchedule)      // 修改定时任务
-		authGroup.DELETE("/schedules/:id", routes.DeleteSchedule)  // 删除定时任务
-		authGroup.POST("/schedules/:id/stop", routes.StopSchedule) // 停止定时任务
-		authGroup.POST("/schedules/:id/run", routes.RunSchedule)   // 运行定时任务
+		authGroup.GET("/schedules", routes.GetScheduleList)              // 定时任务列表
+		authGroup.GET("/schedules/:id", routes.GetSchedule)              // 定时任务详情
+		authGroup.PUT("/schedules", routes.PutSchedule)                  // 创建定时任务
+		authGroup.POST("/schedules/:id", routes.PostSchedule)            // 修改定时任务
+		authGroup.DELETE("/schedules/:id", routes.DeleteSchedule)        // 删除定时任务
+		authGroup.POST("/schedules/:id/disable", routes.DisableSchedule) // 禁用定时任务
+		authGroup.POST("/schedules/:id/enable", routes.EnableSchedule)   // 启用定时任务
 		// 统计数据
 		authGroup.GET("/stats/home", routes.GetHomeStats) // 首页统计数据
 		// 用户
diff --git a/backend/model/node.go b/backend/model/node.go
index effbfbd0..88c4ed66 100644
--- a/backend/model/node.go
+++ b/backend/model/node.go
@@ -173,8 +173,8 @@ func GetNode(id bson.ObjectId) (Node, error) {
 	defer s.Close()
 
 	if err := c.FindId(id).One(&node); err != nil {
-		log.Errorf("get node error: %s, id: %s", err.Error(), id.Hex())
-		debug.PrintStack()
+		//log.Errorf("get node error: %s, id: %s", err.Error(), id.Hex())
+		//debug.PrintStack()
 		return node, err
 	}
 	return node, nil
diff --git a/backend/model/schedule.go b/backend/model/schedule.go
index c1923885..3b654b74 100644
--- a/backend/model/schedule.go
+++ b/backend/model/schedule.go
@@ -16,20 +16,17 @@ type Schedule struct {
 	Name        string        `json:"name" bson:"name"`
 	Description string        `json:"description" bson:"description"`
 	SpiderId    bson.ObjectId `json:"spider_id" bson:"spider_id"`
-	//NodeId bson.ObjectId `json:"node_id" bson:"node_id"`
-	//NodeKey string `json:"node_key" bson:"node_key"`
 	Cron        string          `json:"cron" bson:"cron"`
 	EntryId     cron.EntryID    `json:"entry_id" bson:"entry_id"`
 	Param       string          `json:"param" bson:"param"`
 	RunType     string          `json:"run_type" bson:"run_type"`
 	NodeIds     []bson.ObjectId `json:"node_ids" bson:"node_ids"`
-
-	// 状态
-	Status string `json:"status" bson:"status"`
+	Status      string          `json:"status" bson:"status"`
+	Enabled     bool            `json:"enabled" bson:"enabled"`
 
 	// 前端展示
 	SpiderName string `json:"spider_name" bson:"spider_name"`
-	NodeName   string `json:"node_name" bson:"node_name"`
+	Nodes      []Node `json:"nodes" bson:"nodes"`
 	Message    string `json:"message" bson:"message"`
 
 	CreateTs time.Time `json:"create_ts" bson:"create_ts"`
@@ -84,20 +81,15 @@ func GetScheduleList(filter interface{}) ([]Schedule, error) {
 
 	var schs []Schedule
 	for _, schedule := range schedules {
-		// TODO: 获取节点名称
-		//if schedule.NodeId == bson.ObjectIdHex(constants.ObjectIdNull) {
-		//	// 选择所有节点
-		//	schedule.NodeName = "All Nodes"
-		//} else {
-		//	// 选择单一节点
-		//	node, err := GetNode(schedule.NodeId)
-		//	if err != nil {
-		//		schedule.Status = constants.ScheduleStatusError
-		//		schedule.Message = constants.ScheduleStatusErrorNotFoundNode
-		//	} else {
-		//		schedule.NodeName = node.Name
-		//	}
-		//}
+		// 获取节点名称
+		schedule.Nodes = []Node{}
+		if schedule.RunType == constants.RunTypeSelectedNodes {
+			for _, nodeId := range schedule.NodeIds {
+				// 选择单一节点,获取失败时跳过
+				node, err := GetNode(nodeId)
+				if err != nil {
+					continue
+				}
+				schedule.Nodes = append(schedule.Nodes, node)
+			}
+		}
 
 		// 获取爬虫名称
 		spider, err := GetSpider(schedule.SpiderId)
diff --git a/backend/model/spider.go b/backend/model/spider.go
index 02c3aa8d..3026a66b 100644
--- a/backend/model/spider.go
+++ b/backend/model/spider.go
@@ -107,13 +107,13 @@ func (spider *Spider) Delete() error {
 }
 
 // 获取爬虫列表
-func GetSpiderList(filter interface{}, skip int, limit int) ([]Spider, int, error) {
+func GetSpiderList(filter interface{}, skip int, limit int, sortStr string) ([]Spider, int, error) {
 	s, c := database.GetCol("spiders")
 	defer s.Close()
 
 	// 获取爬虫列表
 	var spiders []Spider
-	if err := c.Find(filter).Skip(skip).Limit(limit).Sort("+name").All(&spiders); err != nil {
+	if err := c.Find(filter).Skip(skip).Limit(limit).Sort(sortStr).All(&spiders); err != nil {
 		debug.PrintStack()
 		return spiders, 0, err
 	}
@@ -275,27 +275,7 @@ func GetSpiderCount() (int, error) {
 	return count, nil
 }
 
-// 获取爬虫类型
-func GetSpiderTypes() ([]*entity.SpiderType, error) {
-	s, c := database.GetCol("spiders")
-	defer s.Close()
-
-	group := bson.M{
-		"$group": bson.M{
-			"_id":   "$type",
-			"count": bson.M{"$sum": 1},
-		},
-	}
-	var types []*entity.SpiderType
-	if err := c.Pipe([]bson.M{group}).All(&types); err != nil {
-		log.Errorf("get spider types error: %s", err.Error())
-		debug.PrintStack()
-		return nil, err
-	}
-
-	return types, nil
-}
-
+// 获取可配置爬虫数据
 func GetConfigSpiderData(spider Spider) (entity.ConfigSpiderData, error) {
 	// 构造配置数据
 	configData := entity.ConfigSpiderData{}
diff --git a/backend/model/task.go b/backend/model/task.go
index 299661ed..6762bd54 100644
--- a/backend/model/task.go
+++ b/backend/model/task.go
@@ -117,18 +117,12 @@ func GetTaskList(filter interface{}, skip int, limit int, sortKey string) ([]Tas
 
 	for i, task := range tasks {
 		// 获取爬虫名称
-		spider, err := task.GetSpider()
-		if err != nil || spider.Id.Hex() == "" {
-			_ = spider.Delete()
-		} else {
+		if spider, err := task.GetSpider(); err == nil {
 			tasks[i].SpiderName = spider.DisplayName
 		}
 
 		// 获取节点名称
-		node, err := task.GetNode()
-		if node.Id.Hex() == "" || err != nil {
-			_ = task.Delete()
-		} else {
+		if node, err := task.GetNode(); err == nil {
 			tasks[i].NodeName = node.Name
 		}
 	}
@@ -142,6 +136,8 @@ func GetTaskListTotal(filter interface{}) (int, error) {
 	var result int
 	result, err := c.Find(filter).Count()
 	if err != nil {
+		log.Errorf(err.Error())
+		debug.PrintStack()
 		return result, err
 	}
 	return result, nil
@@ -168,6 +164,8 @@ func AddTask(item Task) error {
 	item.UpdateTs = time.Now()
 
 	if err := c.Insert(&item); err != nil {
+		log.Errorf(err.Error())
+		debug.PrintStack()
 		return err
 	}
 	return nil
@@ -179,6 +177,8 @@ func RemoveTask(id string) error {
 
 	var result Task
 	if err := c.FindId(id).One(&result); err != nil {
+		log.Errorf(err.Error())
+		debug.PrintStack()
 		return err
 	}
 
diff --git a/backend/routes/schedule.go b/backend/routes/schedule.go
index e54c49a3..c7ef474a 100644
--- a/backend/routes/schedule.go
+++ b/backend/routes/schedule.go
@@ -110,9 +110,9 @@ func DeleteSchedule(c *gin.Context) {
 }
 
-// 停止定时任务
-func StopSchedule(c *gin.Context) {
+// 禁用定时任务
+func DisableSchedule(c *gin.Context) {
 	id := c.Param("id")
-	if err := services.Sched.Stop(bson.ObjectIdHex(id)); err != nil {
+	if err := services.Sched.Disable(bson.ObjectIdHex(id)); err != nil {
 		HandleError(http.StatusInternalServerError, c, err)
 		return
 	}
@@ -120,9 +120,9 @@ func StopSchedule(c *gin.Context) {
 }
 
-// 运行定时任务
-func RunSchedule(c *gin.Context) {
+// 启用定时任务
+func EnableSchedule(c *gin.Context) {
 	id := c.Param("id")
-	if err := services.Sched.Run(bson.ObjectIdHex(id)); err != nil {
+	if err := services.Sched.Enable(bson.ObjectIdHex(id)); err != nil {
 		HandleError(http.StatusInternalServerError, c, err)
 		return
 	}
diff --git a/backend/routes/spider.go b/backend/routes/spider.go
index a5623b67..91bab47a 100644
--- a/backend/routes/spider.go
+++ b/backend/routes/spider.go
@@ -27,22 +27,38 @@ import (
 )
 
 func GetSpiderList(c *gin.Context) {
-	pageNum, _ := c.GetQuery("pageNum")
-	pageSize, _ := c.GetQuery("pageSize")
+	pageNum, _ := c.GetQuery("page_num")
+	pageSize, _ := c.GetQuery("page_size")
 	keyword, _ := c.GetQuery("keyword")
 	t, _ := c.GetQuery("type")
+	sortKey, _ := c.GetQuery("sort_key")
+	sortDirection, _ := c.GetQuery("sort_direction")
 
+	// 筛选
 	filter := bson.M{
 		"name": bson.M{"$regex": bson.RegEx{Pattern: keyword, Options: "im"}},
 	}
-
 	if t != "" && t != "all" {
 		filter["type"] = t
 	}
 
+	// 排序
+	sortStr := "-_id"
+	if sortKey != "" && sortDirection != "" {
+		if sortDirection == constants.DESCENDING {
+			sortStr = "-" + sortKey
+		} else if sortDirection == constants.ASCENDING {
+			sortStr = "+" + sortKey
+		} else {
+			HandleErrorF(http.StatusBadRequest, c, "invalid sort_direction")
+			return
+		}
+	}
+
+	// 分页
 	page := &entity.Page{}
 	page.GetPage(pageNum, pageSize)
-	results, count, err := model.GetSpiderList(filter, page.Skip, page.Limit)
+
+	results, count, err := model.GetSpiderList(filter, page.Skip, page.Limit, sortStr)
 	if err != nil {
 		HandleError(http.StatusInternalServerError, c, err)
 		return
 	}
@@ -693,20 +709,6 @@ func RenameSpiderFile(c *gin.Context) {
 	})
 }
 
-// 爬虫类型
-func GetSpiderTypes(c *gin.Context) {
-	types, err := model.GetSpiderTypes()
-	if err != nil {
-		HandleError(http.StatusInternalServerError, c, err)
-		return
-	}
-	c.JSON(http.StatusOK, Response{
-		Status:  "ok",
-		Message: "success",
-		Data:    types,
-	})
-}
-
 func GetSpiderStats(c *gin.Context) {
 	type Overview struct {
 		TaskCount int `json:"task_count" bson:"task_count"`
@@ -826,3 +828,25 @@ func GetSpiderStats(c *gin.Context) {
 		},
 	})
 }
+
+func GetSpiderSchedules(c *gin.Context) {
+	id := c.Param("id")
+
+	if !bson.IsObjectIdHex(id) {
+		HandleErrorF(http.StatusBadRequest, c, "spider_id is invalid")
+		return
+	}
+
+	// 获取定时任务
+	list, err := model.GetScheduleList(bson.M{"spider_id": bson.ObjectIdHex(id)})
+	if err != nil {
+		HandleError(http.StatusInternalServerError, c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, Response{
+		Status:  "ok",
+		Message: "success",
+		Data:    list,
+	})
+}
diff --git a/backend/routes/system.go b/backend/routes/system.go
index b4e130a9..8c443d2a 100644
--- a/backend/routes/system.go
+++ b/backend/routes/system.go
@@ -259,6 +259,7 @@ func GetDepJson(c *gin.Context) {
 		_dep, err := services.FetchPythonDepInfo(depName)
 		if err != nil {
 			HandleError(http.StatusInternalServerError, c, err)
+			return
 		}
 		dep = _dep
 	} else {
diff --git a/backend/services/node.go b/backend/services/node.go
index 515ce9c9..d6124205 100644
--- a/backend/services/node.go
+++ b/backend/services/node.go
@@ -103,7 +103,7 @@ func UpdateNodeStatus() {
 	model.ResetNodeStatusToOffline(list)
 }
 
-// 处理接到信息
+// 处理节点信息
 func handleNodeInfo(key string, data *Data) {
 	// 添加同步锁
 	v, err := database.RedisClient.Lock(key)
@@ -186,10 +186,12 @@ func UpdateNodeData() {
 		debug.PrintStack()
 		return
 	}
+
 	if err := database.RedisClient.HSet("nodes", key, utils.BytesToString(dataBytes)); err != nil {
 		log.Errorf(err.Error())
 		return
 	}
+
 }
 
 func MasterNodeCallback(message redis.Message) (err error) {
diff --git a/backend/services/schedule.go b/backend/services/schedule.go
index 53938aea..d737c3ac 100644
--- a/backend/services/schedule.go
+++ b/backend/services/schedule.go
@@ -53,6 +53,8 @@ func AddScheduleTask(s model.Schedule) func() {
 			Param:    s.Param,
 		}
 		if err := AddTask(t); err != nil {
+			log.Errorf(err.Error())
+			debug.PrintStack()
 			return
 		}
 		if err := AssignTask(t); err != nil {
@@ -137,7 +139,7 @@ func (s *Scheduler) Start() error {
 
 func (s *Scheduler) AddJob(job model.Schedule) error {
 	spec := job.Cron
 
-	// 添加任务
+	// 添加定时任务
 	eid, err := s.cron.AddFunc(spec, AddScheduleTask(job))
 	if err != nil {
 		log.Errorf("add func task error: %s", err.Error())
@@ -147,7 +149,12 @@
 
 	// 更新EntryID
 	job.EntryId = eid
+
+	// 更新状态
 	job.Status = constants.ScheduleStatusRunning
+	job.Enabled = true
+
+	// 保存定时任务
 	if err := job.Save(); err != nil {
 		log.Errorf("job save error: %s", err.Error())
 		debug.PrintStack()
@@ -176,8 +183,8 @@ func ParserCron(spec string) error {
 	return nil
 }
 
-// 停止定时任务
-func (s *Scheduler) Stop(id bson.ObjectId) error {
+// 禁用定时任务
+func (s *Scheduler) Disable(id bson.ObjectId) error {
 	schedule, err := model.GetSchedule(id)
 	if err != nil {
 		return err
 	}
 	if schedule.EntryId == 0 {
 		return errors.New("entry id not found")
 	}
+
+	// 从cron服务中删除该任务
 	s.cron.Remove(schedule.EntryId)
 
+	// 更新状态
 	schedule.Status = constants.ScheduleStatusStop
+	schedule.Enabled = false
+
 	if err = schedule.Save(); err != nil {
 		return err
 	}
 	return nil
 }
 
-// 运行任务
-func (s *Scheduler) Run(id bson.ObjectId) error {
+// 启用定时任务
+func (s *Scheduler) Enable(id bson.ObjectId) error {
 	schedule, err := model.GetSchedule(id)
 	if err != nil {
 		return err
diff --git a/backend/services/spider.go b/backend/services/spider.go
index 3515afa9..e97c7992 100644
--- a/backend/services/spider.go
+++ b/backend/services/spider.go
@@ -143,7 +143,7 @@ func ReadFileByStep(filePath string, handle func([]byte, *mgo.GridFile), fileCre
 // 发布所有爬虫
 func PublishAllSpiders() {
 	// 获取爬虫列表
-	spiders, _, _ := model.GetSpiderList(nil, 0, constants.Infinite)
+	spiders, _, _ := model.GetSpiderList(nil, 0, constants.Infinite, "-_id")
 	if len(spiders) == 0 {
 		return
 	}
diff --git a/backend/services/system.go b/backend/services/system.go
index 12b8744c..6181afee 100644
--- a/backend/services/system.go
+++ b/backend/services/system.go
@@ -251,6 +251,9 @@ func FetchPythonDepInfo(depName string) (entity.Dependency, error) {
 		return entity.Dependency{}, err
 	}
 	var data PythonDepJsonData
+	if res.Response().StatusCode == 404 {
+		return entity.Dependency{}, errors.New("dependency not found on pypi.org (404)")
+	}
 	if err := res.ToJSON(&data); err != nil {
 		log.Errorf(err.Error())
 		debug.PrintStack()
diff --git a/backend/services/task.go b/backend/services/task.go
index c0aa8fc8..7da6b022 100644
--- a/backend/services/task.go
+++ b/backend/services/task.go
@@ -627,11 +627,15 @@ func AddTask(t model.Task) error {
 
 	// 将任务存入数据库
 	if err := model.AddTask(t); err != nil {
+		log.Errorf(err.Error())
+		debug.PrintStack()
 		return err
 	}
 
 	// 加入任务队列
 	if err := AssignTask(t); err != nil {
+		log.Errorf(err.Error())
+		debug.PrintStack()
 		return err
 	}
 
diff --git a/docker-compose.yml b/docker-compose.yml
index 5c059f95..fcab07f4 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -15,8 +15,6 @@ services:
     depends_on:
       - mongo
      - redis
-    volumes:
-      - "/Users/marvzhang/projects/crawlab-team/crawlab/docker_init.sh:/app/docker_init.sh"
   worker:
     image: tikazyq/crawlab:latest
     container_name: worker
diff --git a/frontend/index.html b/frontend/index.html
index 2c943e7e..5066906e 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -6,6 +6,10 @@
 
+
+
+
+
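Taken together, the backend changes in this diff reshape part of the HTTP API: spider-list sorting via `sort_key`/`sort_direction`, per-spider schedules via `GET /spiders/:id/schedules`, and schedule enable/disable replacing the old `run`/`stop` routes. A sketch of how a client might exercise them — the paths and query parameters come straight from `backend/main.go` and `backend/routes/spider.go`, while the base URL, port, and token format are assumptions:

```bash
# Assumed API root and auth token; adjust to your deployment.
BASE=http://localhost:8000/api
TOKEN="<your-token>"

# Sorted, paginated spider list. sort_direction must be "ascending" or
# "descending" (see backend/constants/common.go); anything else now yields
# a 400 "invalid sort_direction".
curl -H "Authorization: $TOKEN" \
  "$BASE/spiders?page_num=1&page_size=10&sort_key=name&sort_direction=descending"

# Schedules attached to a single spider (new route in this diff).
curl -H "Authorization: $TOKEN" "$BASE/spiders/<spider_id>/schedules"

# Enable/disable a schedule (replaces POST /schedules/:id/run and /stop).
curl -X POST -H "Authorization: $TOKEN" "$BASE/schedules/<schedule_id>/enable"
curl -X POST -H "Authorization: $TOKEN" "$BASE/schedules/<schedule_id>/disable"
```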