diff --git a/backend/conf/config.yml b/backend/conf/config.yml index b63cd247..80653ce5 100644 --- a/backend/conf/config.yml +++ b/backend/conf/config.yml @@ -59,4 +59,8 @@ notification: senderIdentity: '' smtp: user: '' - password: '' \ No newline at end of file + password: '' +repo: + apiUrl: "https://center.crawlab.cn/api" +# apiUrl: "http://localhost:8002" + ossUrl: "https://repo.crawlab.cn" \ No newline at end of file diff --git a/backend/main.go b/backend/main.go index 4b46b033..b06e0418 100644 --- a/backend/main.go +++ b/backend/main.go @@ -317,6 +317,12 @@ func main() { authGroup.GET("/git/public-key", routes.GetGitSshPublicKey) // 获取 SSH 公钥 authGroup.GET("/git/commits", routes.GetGitCommits) // 获取 Git Commits authGroup.POST("/git/checkout", routes.PostGitCheckout) // 获取 Git Commits + // 爬虫市场 / 仓库 + { + authGroup.GET("/repos", routes.GetRepoList) // 获取仓库列表 + authGroup.GET("/repos/sub-dir", routes.GetRepoSubDirList) // 获取仓库子目录 + authGroup.POST("/repos/download", routes.DownloadRepo) // 下载仓库 + } } } diff --git a/backend/routes/base.go b/backend/routes/base.go index b338a833..0204b0ea 100644 --- a/backend/routes/base.go +++ b/backend/routes/base.go @@ -14,3 +14,11 @@ type ListResponse struct { Data interface{} `json:"data"` Error string `json:"error"` } + +type ListRequestData struct { + PageNum int `form:"page_num" json:"page_num"` + PageSize int `form:"page_size" json:"page_size"` + SortKey string `form:"sort_key" json:"sort_key"` + Status string `form:"status" json:"status"` + Keyword string `form:"keyword" json:"keyword"` +} diff --git a/backend/routes/repos.go b/backend/routes/repos.go new file mode 100644 index 00000000..00a4e847 --- /dev/null +++ b/backend/routes/repos.go @@ -0,0 +1,81 @@ +package routes + +import ( + "crawlab/services" + "fmt" + "github.com/apex/log" + "github.com/gin-gonic/gin" + "github.com/imroc/req" + "github.com/spf13/viper" + "net/http" + "runtime/debug" +) + +func GetRepoList(c *gin.Context) { + var data ListRequestData + if err 
:= c.ShouldBindQuery(&data); err != nil { + HandleError(http.StatusBadRequest, c, err) + return + } + params := req.Param{ + "page_num": data.PageNum, + "page_size": data.PageSize, + "keyword": data.Keyword, + "sort_key": data.SortKey, + } + res, err := req.Get(fmt.Sprintf("%s/public/repos", viper.GetString("repo.apiUrl")), params) + if err != nil { + log.Error("get repos error: " + err.Error()) + debug.PrintStack() + HandleError(http.StatusInternalServerError, c, err) + return + } + var resJson interface{} + if err := res.ToJSON(&resJson); err != nil { + log.Error("to json error: " + err.Error()) + debug.PrintStack() + HandleError(http.StatusInternalServerError, c, err) + return + } + c.JSON(http.StatusOK, resJson) +} + +func GetRepoSubDirList(c *gin.Context) { + params := req.Param{ + "full_name": c.Query("full_name"), + } + res, err := req.Get(fmt.Sprintf("%s/public/repos/sub-dir", viper.GetString("repo.apiUrl")), params) + if err != nil { + log.Error("get repo sub-dir error: " + err.Error()) + debug.PrintStack() + HandleError(http.StatusInternalServerError, c, err) + return + } + var resJson interface{} + if err := res.ToJSON(&resJson); err != nil { + log.Error("to json error: " + err.Error()) + debug.PrintStack() + HandleError(http.StatusInternalServerError, c, err) + return + } + c.JSON(http.StatusOK, resJson) +} + +func DownloadRepo(c *gin.Context) { + type RequestData struct { + FullName string `json:"full_name"` + } + var reqData RequestData + if err := c.ShouldBindJSON(&reqData); err != nil { + HandleError(http.StatusBadRequest, c, err) + return + } + if err := services.DownloadRepo(reqData.FullName, services.GetCurrentUserId(c)); err != nil { + HandleError(http.StatusInternalServerError, c, err) + return + } + c.JSON(http.StatusOK, Response{ + Status: "ok", + Message: "success", + }) +} diff --git a/backend/services/repo.go b/backend/services/repo.go new file mode 100644 index 00000000..8e5f02b0 --- /dev/null +++ b/backend/services/repo.go @@ -0,0 +1,82 
@@ +package services + +import ( + "crawlab/constants" + "crawlab/model" + "crawlab/utils" + "fmt" + "github.com/apex/log" + "github.com/globalsign/mgo/bson" + "github.com/imroc/req" + uuid "github.com/satori/go.uuid" + "github.com/spf13/viper" + "path" + "path/filepath" + "runtime/debug" + "strings" +) + +func DownloadRepo(fullName string, userId bson.ObjectId) (err error) { + // 下载 zip 文件 + url := fmt.Sprintf("%s/%s.zip", viper.GetString("repo.ossUrl"), fullName) + progress := func(current, total int64) { + fmt.Println(float32(current)/float32(total)*100, "%") + } + res, err := req.Get(url, req.DownloadProgress(progress)) + if err != nil { + log.Error("download repo error: " + err.Error()) + debug.PrintStack() + return err + } + spiderName := strings.Replace(fullName, "/", "_", -1) + randomId := uuid.NewV4() + tmpFilePath := filepath.Join(viper.GetString("other.tmppath"), spiderName+"."+randomId.String()+".zip") + if err := res.ToFile(tmpFilePath); err != nil { + log.Error("to file error: " + err.Error()) + debug.PrintStack() + return err + } + + // 解压 zip 文件 + tmpFile := utils.OpenFile(tmpFilePath) + if err := utils.DeCompress(tmpFile, viper.GetString("other.tmppath")); err != nil { + log.Error("de-compress error: " + err.Error()) + debug.PrintStack() + return err + } + + // 拷贝文件 + spiderPath := path.Join(viper.GetString("spider.path"), spiderName) + srcDirPath := fmt.Sprintf("%s/data/github.com/%s", viper.GetString("other.tmppath"), fullName) + if err := utils.CopyDir(srcDirPath, spiderPath); err != nil { + log.Error("copy error: " + err.Error()) + debug.PrintStack() + return err + } + + // 创建爬虫 + spider := model.Spider{ + Id: bson.NewObjectId(), + Name: spiderName, + DisplayName: spiderName, + Type: constants.Customized, + Src: spiderPath, + ProjectId: bson.ObjectIdHex(constants.ObjectIdNull), + FileId: bson.ObjectIdHex(constants.ObjectIdNull), + UserId: userId, + } + if err := spider.Add(); err != nil { + log.Error("add spider error: " + err.Error()) + 
debug.PrintStack() + return err + } + + // 上传爬虫 + if err := UploadSpiderToGridFsFromMaster(spider); err != nil { + log.Error("upload spider error: " + err.Error()) + debug.PrintStack() + return err + } + + return nil +} diff --git a/backend/services/spider.go b/backend/services/spider.go index c088abb5..59aa1fad 100644 --- a/backend/services/spider.go +++ b/backend/services/spider.go @@ -239,7 +239,9 @@ func PublishSpider(spider model.Spider) { } // 安装依赖 - go spiderSync.InstallDeps() + if viper.GetString("setting.autoInstall") == "Y" { + go spiderSync.InstallDeps() + } //目录不存在,则直接下载 path := filepath.Join(viper.GetString("spider.path"), spider.Name) diff --git a/frontend/src/i18n/zh.js b/frontend/src/i18n/zh.js index 1bc850a0..68667d2d 100644 --- a/frontend/src/i18n/zh.js +++ b/frontend/src/i18n/zh.js @@ -13,6 +13,7 @@ export default { 'Sites': '网站', 'Setting': '设置', 'Project': '项目', + 'Spider Market': '爬虫市场', // 标签 'Overview': '概览', @@ -518,6 +519,15 @@ export default { 'Year': '年', 'Years': '年', + // 爬虫市场 + 'Search Keyword': '搜索关键词', + 'Sort': '排序', + 'Default Sort': '默认排序', + 'Most Stars': '最多 Stars', + 'Most Forks': '最多 Forks', + 'Latest Pushed': '最近提交', + 'Pushed At': '提交时间', + // 全局 'Related Documentation': '相关文档', 'Click to view related Documentation': '点击查看相关文档', @@ -647,6 +657,8 @@ export default { 'Are you sure to add an API token?': '确认创建 API Token?', 'Are you sure to delete this API token?': '确认删除该 API Token?', 'Please enter Web Hook URL': '请输入 Web Hook URL', + 'Are you sure to download this spider?': '您确定要下载该爬虫?', + 'Downloaded successfully': '下载成功', // 其他 'Star crawlab-team/crawlab on GitHub': '在 GitHub 上为 Crawlab 加星吧' diff --git a/frontend/src/router/index.js b/frontend/src/router/index.js index 7dee4260..203154a5 100644 --- a/frontend/src/router/index.js +++ b/frontend/src/router/index.js @@ -181,6 +181,25 @@ export const constantRouterMap = [ } ] }, + { + path: '/repos', + component: Layout, + meta: { + title: 'Spider Market', + icon: 'fa fa-cloud' 
+ }, + children: [ + { + path: '', + name: 'RepoList', + component: () => import('../views/repo/RepoList'), + meta: { + title: 'Spider Market', + icon: 'fa fa-cloud' + } + } + ] + }, { path: '/disclaimer', component: Layout, @@ -243,8 +262,7 @@ export const constantRouterMap = [ component: Layout, meta: { title: 'User', - icon: 'fa fa-users', - isNew: true + icon: 'fa fa-users' }, children: [ { diff --git a/frontend/src/utils/request.js b/frontend/src/utils/request.js index 6c39099f..b1d1d0a9 100644 --- a/frontend/src/utils/request.js +++ b/frontend/src/utils/request.js @@ -82,7 +82,7 @@ if (!CRAWLAB_API_ADDRESS.match('CRAWLAB_API_ADDRESS')) { const service = axios.create({ baseURL: baseUrl, // url = base url + request url // withCredentials: true, // send cookies when cross-domain requests - timeout: 5000 // request timeout + timeout: 15000 // request timeout }) // request interceptor service.interceptors.request.use( diff --git a/frontend/src/views/repo/RepoList.vue b/frontend/src/views/repo/RepoList.vue new file mode 100644 index 00000000..6519c184 --- /dev/null +++ b/frontend/src/views/repo/RepoList.vue @@ -0,0 +1,257 @@ + + + + + + + + + + + + + + + + + + {{ $t('Search') }} + + + + + + + + + + {{ sub.name }} + + + + + + + + + + + + + + + + + {{ getTime(scope.row.pushed_at) }} + + + + + + + + + + + + + + + + + + + +