# Crawlab server configuration (v0.4.x).
# Note: this application reads "Y"/"N" string flags, not YAML booleans —
# keep those values quoted exactly as-is.
api:
  address: "localhost:8000"

mongo:
  host: localhost
  port: 27017
  db: crawlab_test
  username: ""
  password: ""
  authSource: "admin"

redis:
  address: localhost
  password: ""
  database: 1
  port: 6379

log:
  level: info
  path: "/var/logs/crawlab"
  isDeletePeriodically: "N"
  deleteFrequency: "@hourly"

server:
  # Quoted so the address is unambiguously a string, not a misparsed scalar.
  host: "0.0.0.0"
  port: 8000
  master: "Y"
  secret: "crawlab"
  register:
    # Worker identity source: MAC address / IP address / hostname.
    # If type is "ip", the IP must be set manually below.
    type: "mac"
    ip: ""
  lang:
    # Language runtimes to install on the node: "Y" = install, "N" = skip.
    python: "Y"
    node: "N"
    java: "N"
    dotnet: "N"
    php: "N"

spider:
  path: "/app/spiders"

task:
  workers: 4

other:
  tmppath: "/tmp"

# Quoted so the version stays a string even if a future value (e.g. 0.5)
# would otherwise parse as a float.
version: "0.4.10"

setting:
  crawlabLogToES: "N"  # Send crawlab runtime log to ES; to enable, set "Y" and remember to set esClient
  crawlabLogIndex: "crawlab-log"
  allowRegister: "N"
  enableTutorial: "N"
  runOnMaster: "Y"
  demoSpiders: "N"
  checkScrapy: "Y"
  autoInstall: "Y"
  esClient: ""  # Your ES client, e.g. http://192.168.1.1:9200 or http://your-domain.com; leave empty if ES is not used
  spiderLogIndex: "spider-log"  # Index pattern for Kibana; needs to be configured in Kibana

notification:
  mail:
    server: ""
    port: ""
    senderEmail: ""
    senderIdentity: ""
    smtp:
      user: ""
      password: ""