mongo:
  host: localhost
  port: 27017
  db: crawlab_test
  username: ""
  password: ""
  authSource: "admin"
server:
  host: 0.0.0.0
  port: 8000
#  master: true
#  secret: "crawlab"
#  register:
#    # type can be mac/ip/customName; if ip, the IP must be set manually; if customName, fill in customNodeName below
#    type: "mac"
#    customNodeName: ""  # custom node name, default node1; only takes effect when type = customName
#    ip: ""
#  lang:  # language runtimes to install: Y to install, N to skip
#    python: "Y"
#    node: "N"
#    java: "N"
#    dotnet: "N"
#    php: "N"
#  scripts: "/app/backend/scripts"
spider:
  fs: "/spiders"
  workspace: "/workspace"
  repo: "/repo"
task:
  workers: 16
  cancelWaitSeconds: 30
# TODO: implement
#setting:
#  crawlabLogToES: "N"  # send Crawlab runtime logs to ES; set to "Y" to enable, and remember to set esClient
#  crawlabLogIndex: "crawlab-log"
#  allowRegister: "N"
#  enableTutorial: "N"
#  runOnMaster: "Y"
#  demoSpiders: "N"
#  checkScrapy: "Y"
#  autoInstall: "Y"
#  esClient: ""  # your ES endpoint, e.g. http://192.168.1.1:9200 or http://your-domain.com; leave empty if not using ES
#  spiderLogIndex: "spider-log"  # index pattern for Kibana; must be configured in Kibana
# TODO: implement
#notification:
#  mail:
#    server: ''
#    port: ''
#    senderEmail: ''
#    senderIdentity: ''
#    smtp:
#      user: ''
#      password: ''
grpc:
  address: localhost:9666
  server:
    address: 0.0.0.0:9666
  authKey: Crawlab2021!
fs:
  filer:
    proxy: http://localhost:8888
    url: http://localhost:8000/filer
    authKey: Crawlab2021!
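
# Example: pointing the mongo section above at a remote, authenticated
# MongoDB instance. This is only a sketch reusing the keys already defined
# in this file; the host, credentials, and database name are hypothetical
# placeholders, not values shipped with Crawlab.
#mongo:
#  host: mongo.internal.example.com
#  port: 27017
#  db: crawlab
#  username: crawlab_user
#  password: change-me
#  authSource: "admin"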
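
# Example: equivalent environment-variable overrides for deployments where
# editing this file is inconvenient. The CRAWLAB_* names below assume the
# SECTION_KEY mapping used by the official Docker images; treat them as an
# assumption and verify against the documentation for your Crawlab version.
#   CRAWLAB_MONGO_HOST=mongo
#   CRAWLAB_MONGO_PORT=27017
#   CRAWLAB_GRPC_ADDRESS=master:9666
#   CRAWLAB_GRPC_AUTHKEY=Crawlab2021!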