# Mirror of https://github.com/crawlab-team/crawlab.git (synced 2026-01-22 17:31:03 +01:00)
api:
  address: "localhost:8000"
mongo:
  host: localhost
  port: 27017
  db: crawlab_test
  username: ""
  password: ""
  authSource: "admin"
redis:
  address: localhost
  password: ""
  database: 1
  port: 6379
log:
  level: info
  path: "/var/logs/crawlab"
  isDeletePeriodically: "N"
  deleteFrequency: "@hourly"
server:
  host: 0.0.0.0
  port: 8000
  master: "Y"
  secret: "crawlab"
  register:
    # type: mac/ip/customName. With ip, the IP must be set manually below; with customName, customNodeName must be filled in.
    type: "mac"
    customNodeName: "" # custom node name (default node1); only takes effect when type = customName
    ip: ""
  lang: # language environments to install: "Y" = install, "N" = skip
    python: "Y"
    node: "N"
    java: "N"
    dotnet: "N"
    php: "N"
spider:
  path: "/app/spiders"
task:
  workers: 16
other:
  tmppath: "/tmp"
version: 0.5.0
setting:
  crawlabLogToES: "N" # send Crawlab runtime logs to Elasticsearch; if enabled ("Y"), remember to set esClient
  crawlabLogIndex: "crawlab-log"
  allowRegister: "N"
  enableTutorial: "N"
  runOnMaster: "Y"
  demoSpiders: "N"
  checkScrapy: "Y"
  autoInstall: "Y"
  esClient: "" # Elasticsearch endpoint, e.g. http://192.168.1.1:9200 or http://your-domain.com; leave empty if ES is not used
  spiderLogIndex: "spider-log" # index pattern for Kibana; must also be configured in Kibana
notification:
  mail:
    server: ''
    port: ''
    senderEmail: ''
    senderIdentity: ''
    smtp:
      user: ''
      password: ''
repo:
  apiUrl: "https://center.crawlab.cn/api"
  # apiUrl: "http://localhost:8002"
  ossUrl: "https://repo.crawlab.cn"
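
Crawlab's Go backend reads this file with github.com/spf13/viper, so each nested key above is addressed with dot notation (for example mongo.host or server.master). Below is a minimal sketch of that loading pattern; the conf/config.yml path and the particular keys printed are illustrative assumptions, not the project's exact bootstrap code.

package main

import (
	"fmt"
	"log"

	"github.com/spf13/viper"
)

func main() {
	// Assumed location of the config file shown above.
	viper.SetConfigFile("conf/config.yml")
	if err := viper.ReadInConfig(); err != nil {
		log.Fatalf("failed to read config: %v", err)
	}

	// Nested YAML keys are addressed with dot notation.
	mongoHost := viper.GetString("mongo.host")
	mongoPort := viper.GetInt("mongo.port")
	// The "Y"/"N" flags are stored as strings, so compare against "Y" rather than reading a boolean.
	isMaster := viper.GetString("server.master") == "Y"

	fmt.Printf("mongo=%s:%d master=%v\n", mongoHost, mongoPort, isMaster)
}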