Mirror of https://github.com/crawlab-team/crawlab.git (synced 2026-01-26 17:49:15 +01:00)
Commit: added Results
.gitignore (vendored, 3 changed lines)
@@ -110,3 +110,6 @@ node_modules/

# egg-info
*.egg-info

# .DS_Store
.DS_Store
README.md (21 changed lines)
@@ -34,6 +34,23 @@ cd frontend
npm run dev
```

## Screenshot

#### Home Page



#### Spider List



#### Spider Detail - Overview



#### Task Detail - Results



## Architecture

The architecture of Crawlab is shown below. It is very similar to the Celery architecture, but with a few extra modules, including Frontend, Spiders and Flower, added to support the crawling-management functionality.
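The backend reuses the standard Celery topology: the app enqueues tasks on the broker, Celery workers execute them, and Flower monitors the workers. A minimal sketch of that pattern (the broker URL and task body here are illustrative assumptions, not Crawlab's actual wiring):

```python
# A minimal sketch of the Celery pattern the backend builds on.
# Broker URL and task body are assumptions for illustration only.
from celery import Celery

celery_app = Celery('crawlab', broker='redis://localhost:6379/0')

@celery_app.task(bind=True)
def execute_spider(self, spider_id: str):
    # a worker picks this up from the broker and runs the crawl
    print(f'task {self.request.id} crawling spider {spider_id}')
```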
@@ -60,3 +77,7 @@ After deploying the spider, you can click "Run" button on spider detail page and
### Tasks

Tasks are triggered and run by the workers. Users can check the task status and logs on the task detail page.

### App

### Broker
@@ -86,12 +86,17 @@ def run_worker():


@click.command()
-@click.argument('action', type=click.Choice([ActionType.APP, ActionType.FLOWER, ActionType.RUN_ALL]))
+@click.argument('action', type=click.Choice([ActionType.APP,
+                                             ActionType.FLOWER,
+                                             ActionType.WORKER,
+                                             ActionType.RUN_ALL]))
def main(action):
    if action == ActionType.APP:
        run_app()
    elif action == ActionType.FLOWER:
        run_flower()
+    elif action == ActionType.WORKER:
+        run_worker()
    elif action == ActionType.RUN_ALL:
        p_flower = Process(target=run_flower)
        p_flower.start()
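With the new WORKER choice, the same entry point can start the app, Flower, a standalone worker, or everything at once. A quick way to exercise the dispatch is click's built-in test runner; this sketch assumes `main` is importable from the CLI module, whose path is not shown in this diff:

```python
# A sketch that exercises the new 'worker' action via click's test runner.
from click.testing import CliRunner

from manage import main  # hypothetical module name; the diff omits the file name

runner = CliRunner()
result = runner.invoke(main, ['worker'])  # dispatches to run_worker()
print(result.exit_code)
```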
@@ -1,3 +1,4 @@
+from constants.task import TaskStatus
from db.manager import db_manager
from routes.base import BaseApi
from utils import jsonify

@@ -64,8 +65,11 @@ class NodeApi(BaseApi):
            spider_id = item['spider_id']
            spider = db_manager.get('spiders', id=str(spider_id))
            item['spider_name'] = spider['name']
-            task = db_manager.get('tasks_celery', id=item['_id'])
-            item['status'] = task['status']
+            _task = db_manager.get('tasks_celery', id=item['_id'])
+            if _task:
+                item['status'] = _task['status']
+            else:
+                item['status'] = TaskStatus.UNAVAILABLE
        return jsonify({
            'status': 'ok',
            'items': items
@@ -30,14 +30,23 @@ class SpiderApi(BaseApi):
    col_name = 'spiders'

    arguments = (
        # name of spider
        ('name', str),

        # execute shell command
        ('cmd', str),

        # spider source folder
        ('src', str),

        # spider type
        ('type', str),

        # spider language
        ('lang', str),

        # for deploy only
        ('node_id', str),
        # spider results collection
        ('col', str),
    )

    def get(self, id=None, action=None):
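The new `('col', str)` argument lets a spider declare which MongoDB collection its results land in. A hypothetical request saving that field through the API might look like the sketch below; the exact endpoint and verb come from BaseApi's generic handlers, which this diff does not show, so treat the URL shape as an assumption:

```python
# Hypothetical request setting the new `col` (results collection) field.
# Endpoint shape is an assumption; BaseApi's routing is not in this diff.
import requests

requests.post('http://localhost:8000/api/spiders/<spider_id>', data={
    'name': 'juejin',
    'cmd': 'scrapy crawl juejin_spider',
    'col': 'results_juejin',  # collection where crawl results will be stored
})
```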
@@ -130,8 +139,6 @@ class SpiderApi(BaseApi):
        }

    def on_crawl(self, id):
        args = self.parser.parse_args()

        job = execute_spider.delay(id)

        return {
@@ -2,6 +2,7 @@ from constants.task import TaskStatus
from db.manager import db_manager
from routes.base import BaseApi
from utils import jsonify
+from utils.spider import get_spider_col_fields


class TaskApi(BaseApi):

@@ -71,3 +72,17 @@ class TaskApi(BaseApi):
                'status': 'ok',
                'error': str(err)
            }, 500

    def get_results(self, id):
        task = db_manager.get('tasks', id=id)
        spider = db_manager.get('spiders', id=task['spider_id'])
        col_name = spider.get('col')
        if not col_name:
            return []
        fields = get_spider_col_fields(col_name)
        items = db_manager.list(col_name, {'task_id': id})
        return jsonify({
            'status': 'ok',
            'fields': fields,
            'items': items
        })
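The frontend consumes this as GET `/tasks/<id>/get_results` (see `getTaskResults` in the task store further down in this commit). A sketch of calling it directly, with the base URL as an assumption:

```python
import requests

base_url = 'http://localhost:8000/api'  # assumption; depends on deployment
task_id = '5c...'                       # a real task _id
res = requests.get(f'{base_url}/tasks/{task_id}/get_results').json()
print(res['fields'])  # column names sampled from the results collection
print(res['items'])   # result documents tagged with this task_id
```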
@@ -17,7 +17,6 @@ logger = get_logger(__name__)

@celery_app.task(bind=True)
def execute_spider(self, id: str):
-    print(self.state)
    task_id = self.request.id
    hostname = self.request.hostname
    spider = db_manager.get('spiders', id=id)

@@ -53,6 +52,7 @@ def execute_spider(self, id: str):
    # execute the command
    env = os.environ.copy()
    env['CRAWLAB_TASK_ID'] = task_id
+    env['CRAWLAB_COLLECTION'] = spider.get('col')
    p = subprocess.Popen(command.split(' '),
                         stdout=stdout.fileno(),
                         stderr=stderr.fileno(),
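CRAWLAB_TASK_ID and CRAWLAB_COLLECTION are handed to the spider subprocess through its environment, which is how arbitrary spiders can tag their results without linking against Crawlab. The example juejin pipeline later in this commit reads them back like this:

```python
import os

# read back the variables exported by execute_spider above
task_id = os.environ.get('CRAWLAB_TASK_ID')              # tags each scraped item
col_name = os.environ.get('CRAWLAB_COLLECTION', 'test')  # target results collection
```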
@@ -1,6 +1,7 @@
import os

from constants.spider import FILE_SUFFIX_LANG_MAPPING, LangType, SUFFIX_IGNORE, SpiderType
+from db.manager import db_manager


def get_lang_by_stats(stats: dict) -> LangType:

@@ -21,3 +22,12 @@ def get_spider_type(path: str) -> SpiderType:
    for file_name in os.listdir(path):
        if file_name == 'scrapy.cfg':
            return SpiderType.SCRAPY


def get_spider_col_fields(col_name):
    items = db_manager.list(col_name, {}, limit=100, sort_key='_id')
    fields = set()
    for item in items:
        for k in item.keys():
            fields.add(k)
    return list(fields)
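`get_spider_col_fields` infers the result schema by sampling up to 100 documents and unioning their keys; because it accumulates into a `set`, column order is arbitrary between calls. If stable column order ever matters for the table view, a deterministic variant could look like this (a sketch, not part of this commit):

```python
def get_spider_col_fields_ordered(col_name):
    # same sampling as get_spider_col_fields, but preserves first-seen key order
    items = db_manager.list(col_name, {}, limit=100, sort_key='_id')
    fields = []
    for item in items:
        for k in item.keys():
            if k not in fields:
                fields.append(k)
    return fields
```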
@@ -15,10 +15,14 @@
        <el-form-item label="Source Folder">
          <el-input v-model="spiderForm.src" placeholder="Source Folder" disabled></el-input>
        </el-form-item>
-       <el-form-item label="Execute Command" prop="cmd" :rule="cmdRule" required>
+       <el-form-item label="Execute Command" prop="cmd" required>
          <el-input v-model="spiderForm.cmd" placeholder="Execute Command"
                    :disabled="isView"></el-input>
        </el-form-item>
+       <el-form-item label="Results Collection">
+         <el-input v-model="spiderForm.col" placeholder="Results Collection"
+                   :disabled="isView"></el-input>
+       </el-form-item>
        <el-form-item label="Spider Type">
          <el-select v-model="spiderForm.type" placeholder="Select Spider Type" :disabled="isView" clearable>
            <el-option value="scrapy" label="Scrapy"></el-option>
@@ -37,8 +41,8 @@
      </el-form>
    </el-row>
    <el-row class="button-container" v-if="!isView">
-     <el-button type="success" @click="onRun">Run</el-button>
-     <el-button type="primary" @click="onDeploy">Deploy</el-button>
+     <el-button type="danger" @click="onRun">Run</el-button>
+     <!--<el-button type="primary" @click="onDeploy">Deploy</el-button>-->
      <el-button type="success" @click="onSave">Save</el-button>
    </el-row>
  </div>
@@ -7,7 +7,7 @@
      </el-row>

      <!--last deploys-->
-     <el-row>
+     <el-row v-if="false">
        <deploy-table-view title="Latest Deploys"/>
      </el-row>
    </el-col>

@@ -7,7 +7,7 @@
      </el-row>

      <!--last deploys-->
-     <el-row>
+     <el-row v-if="false">
        <deploy-table-view title="Latest Deploys"/>
      </el-row>
    </el-col>
frontend/src/components/TableView/GeneralTableView.vue (new file, 77 lines)
@@ -0,0 +1,77 @@
<template>
  <div class="general-table-view">
    <el-table
      :data="filteredData"
      :header-cell-style="{background:'rgb(48, 65, 86)',color:'white'}"
      border>
      <template v-for="col in columns">
        <el-table-column :key="col" :label="col" :property="col" align="center">
        </el-table-column>
      </template>
    </el-table>
    <div class="pagination">
      <el-pagination
        :current-page.sync="pagination.pageNum"
        :page-sizes="[10, 20, 50, 100]"
        :page-size.sync="pagination.pageSize"
        layout="sizes, prev, pager, next"
        :total="data.length">
      </el-pagination>
    </div>
  </div>
</template>

<script>
export default {
  name: 'GeneralTableView',
  data () {
    return {
      pagination: {
        pageNum: 1,
        pageSize: 10
      }
    }
  },
  props: {
    columns: {
      type: Array,
      default () {
        return []
      }
    },
    data: {
      type: Array,
      default () {
        return []
      }
    }
  },
  computed: {
    filteredData () {
      return this.data
        .map(d => {
          for (let k in d) {
            if (d.hasOwnProperty(k)) {
              if (d[k] === undefined || d[k] === null) continue
              if (typeof d[k] === 'object') {
                if (d[k].$oid) {
                  d[k] = d[k].$oid
                }
              }
            }
          }
          return d
        })
        .filter((d, index) => {
          // pagination
          const { pageNum, pageSize } = this.pagination
          return (pageSize * (pageNum - 1) <= index) && (index < pageSize * pageNum)
        })
    }
  }
}
</script>

<style scoped>

</style>
@@ -4,7 +4,7 @@
    <h5 class="title">{{title}}</h5>
    <el-button type="success" plain class="small-btn" size="mini" icon="fa fa-refresh" @click="onRefresh"></el-button>
  </el-row>
- <el-table border height="240px" :data="taskList">
+ <el-table border height="480px" :data="taskList">
    <el-table-column property="node" label="Node" width="220" align="center">
      <template slot-scope="scope">
        <a class="a-tag" @click="onClickNode(scope.row)">{{scope.row.node_id}}</a>

@@ -65,7 +65,11 @@ export default {
      this.$router.push(`/tasks/${row._id}`)
    },
    onRefresh () {
-     this.$store.dispatch('spider/getTaskList', this.spiderForm._id.$oid)
+     if (this.$route.path.split('/')[1] === 'spiders') {
+       this.$store.dispatch('spider/getTaskList', this.$route.params.id)
+     } else if (this.$route.path.split('/')[1] === 'nodes') {
+       this.$store.dispatch('node/getTaskList', this.$route.params.id)
+     }
    }
  }
}
@@ -60,7 +60,8 @@ const actions = {
      src: state.spiderForm.src,
      cmd: state.spiderForm.cmd,
      type: state.spiderForm.type,
-     lang: state.spiderForm.lang
+     lang: state.spiderForm.lang,
+     col: state.spiderForm.col
    })
      .then(() => {
        dispatch('getSpiderList')
@@ -5,7 +5,9 @@ const state = {
  // TaskList
  taskList: [],
  taskForm: {},
- taskLog: ''
+ taskLog: '',
+ taskResultsData: [],
+ taskResultsColumns: []
}

const getters = {}

@@ -19,6 +21,12 @@ const mutations = {
  },
  SET_TASK_LOG (state, value) {
    state.taskLog = value
  },
+ SET_TASK_RESULTS_DATA (state, value) {
+   state.taskResultsData = value
+ },
+ SET_TASK_RESULTS_COLUMNS (state, value) {
+   state.taskResultsColumns = value
+ }
}

@@ -54,6 +62,13 @@ const actions = {
      .then(response => {
        commit('SET_TASK_LOG', response.data.log)
      })
  },
+ getTaskResults ({ state, commit }, id) {
+   return request.get(`/tasks/${id}/get_results`)
+     .then(response => {
+       commit('SET_TASK_RESULTS_DATA', response.data.items)
+       commit('SET_TASK_RESULTS_COLUMNS', response.data.fields)
+     })
+ }
}
@@ -93,10 +93,10 @@ export default {
    },
    // tableData,
    columns: [
-     { name: 'version', label: 'Version', width: '180' },
+     // { name: 'version', label: 'Version', width: '180' },
      // { name: 'ip', label: 'IP', width: '160' },
      // { name: 'port', label: 'Port', width: '80' },
-     { name: 'finish_ts', label: 'Finish Time', width: '180' },
+     { name: 'finish_ts', label: 'Time', width: '180' },
      { name: 'spider_name', label: 'Spider', width: '180', sortable: true },
      { name: 'node_id', label: 'Node', width: 'auto' }
    ],

@@ -121,6 +121,11 @@ export default {
          }
          return false
        })
+       .filter((d, index) => {
+         // pagination
+         const { pageNum, pageSize } = this.pagination
+         return (pageSize * (pageNum - 1) <= index) && (index < pageSize * pageNum)
+       })
    }
  },
  methods: {
@@ -13,7 +13,7 @@
      <el-tab-pane label="Overview" name="overview">
        <node-overview></node-overview>
      </el-tab-pane>
-     <el-tab-pane label="Deployed Spiders" name="spiders">
+     <el-tab-pane label="Deployed Spiders" name="spiders" v-if="false">
        Deployed Spiders
      </el-tab-pane>
    </el-tabs>
@@ -158,7 +158,7 @@ export default {
      { name: 'name', label: 'Name', width: 'auto' },
      { name: 'type', label: 'Spider Type', width: '160', sortable: true },
      { name: 'lang', label: 'Language', width: '160', sortable: true },
-     { name: 'update_ts', label: 'Last Update', width: '120' }
+     { name: 'last_run_ts', label: 'Last Run', width: '120' }
    ],
    spiderFormRules: {
      name: [{ required: true, message: 'Required Field', trigger: 'change' }]
@@ -12,6 +12,9 @@
          </pre>
        </div>
      </el-tab-pane>
+     <el-tab-pane label="Results" name="results">
+       <general-table-view :data="taskResultsData" :columns="taskResultsColumns"/>
+     </el-tab-pane>
    </el-tabs>
  </div>
</template>

@@ -21,10 +24,12 @@ import {
  mapState
} from 'vuex'
import TaskOverview from '../../components/Overview/TaskOverview'
+import GeneralTableView from '../../components/TableView/GeneralTableView'

export default {
  name: 'TaskDetail',
  components: {
+   GeneralTableView,
    TaskOverview
  },
  data () {

@@ -34,7 +39,9 @@ export default {
  },
  computed: {
    ...mapState('task', [
-     'taskLog'
+     'taskLog',
+     'taskResultsData',
+     'taskResultsColumns'
    ]),
    ...mapState('file', [
      'currentPath'

@@ -53,6 +60,7 @@ export default {
  created () {
    this.$store.dispatch('task/getTaskData', this.$route.params.id)
    this.$store.dispatch('task/getTaskLog', this.$route.params.id)
+   this.$store.dispatch('task/getTaskResults', this.$route.params.id)
  }
}
</script>
@@ -13,7 +13,7 @@
        icon="el-icon-refresh"
        class="refresh"
        @click="onRefresh">
-       Search
+       Refresh
      </el-button>
    </div>
  </div>
@@ -19,8 +19,8 @@ NEWSPIDER_MODULE = 'baidu.spiders'
#USER_AGENT = 'baidu (+http://www.yourdomain.com)'

# Obey robots.txt rules
-# ROBOTSTXT_OBEY = True
-ROBOTSTXT_OBEY = False
+ROBOTSTXT_OBEY = True
+# ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
spiders/example_juejin/juejin/__init__.py (new file, empty)
spiders/example_juejin/juejin/items.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-

# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html

import scrapy


class JuejinItem(scrapy.Item):
    # define the fields for your item here like:
    _id = scrapy.Field()
    title = scrapy.Field()
    link = scrapy.Field()
    like = scrapy.Field()
    task_id = scrapy.Field()
spiders/example_juejin/juejin/middlewares.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals


class JuejinSpiderMiddleware(object):
    # Not all methods need to be defined. If a method is not defined,
    # scrapy acts as if the spider middleware does not modify the
    # passed objects.

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        # Called for each response that goes through the spider
        # middleware and into the spider.

        # Should return None or raise an exception.
        return None

    def process_spider_output(self, response, result, spider):
        # Called with the results returned from the Spider, after
        # it has processed the response.

        # Must return an iterable of Request, dict or Item objects.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # Called when a spider or process_spider_input() method
        # (from other spider middleware) raises an exception.

        # Should return either None or an iterable of Response, dict
        # or Item objects.
        pass

    def process_start_requests(self, start_requests, spider):
        # Called with the start requests of the spider, and works
        # similarly to the process_spider_output() method, except
        # that it doesn't have a response associated.

        # Must return only requests (not items).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
spiders/example_juejin/juejin/pipelines.py (new file, 27 lines)
@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import os

from pymongo import MongoClient

MONGO_HOST = '192.168.99.100'
MONGO_PORT = 27017
MONGO_DB = 'crawlab_test'


class JuejinPipeline(object):
    mongo = MongoClient(host=MONGO_HOST, port=MONGO_PORT)
    db = mongo[MONGO_DB]
    col_name = os.environ.get('CRAWLAB_COLLECTION')
    if not col_name:
        col_name = 'test'
    col = db[col_name]

    def process_item(self, item, spider):
        item['task_id'] = os.environ.get('CRAWLAB_TASK_ID')
        self.col.save(item)
        return item
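One caveat on this pipeline: `Collection.save()` was deprecated in pymongo 3.0 and removed in 4.0, so the example only runs on older driver releases. On a current pymongo, an equivalent upsert would be (a sketch under that assumption):

```python
# Equivalent of self.col.save(item) without the removed save() (pymongo >= 4).
def process_item(self, item, spider):
    item['task_id'] = os.environ.get('CRAWLAB_TASK_ID')
    doc = dict(item)
    if doc.get('_id') is not None:
        # replace an existing document with the same _id, insert otherwise
        self.col.replace_one({'_id': doc['_id']}, doc, upsert=True)
    else:
        self.col.insert_one(doc)
    return item
```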
spiders/example_juejin/juejin/settings.py (new file, 89 lines)
@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-

# Scrapy settings for juejin project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'juejin'

SPIDER_MODULES = ['juejin.spiders']
NEWSPIDER_MODULE = 'juejin.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'

# Obey robots.txt rules
ROBOTSTXT_OBEY = True

# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
# CONCURRENT_REQUESTS_PER_DOMAIN = 16
# CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
# COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
# TELNETCONSOLE_ENABLED = False

# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
# }

# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
#    'juejin.middlewares.JuejinSpiderMiddleware': 543,
# }

# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
#    'juejin.middlewares.MyCustomDownloaderMiddleware': 543,
# }

# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
# EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
# }

# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    'juejin.pipelines.JuejinPipeline': 300,
}

# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# HTTPCACHE_ENABLED = True
# HTTPCACHE_EXPIRATION_SECS = 0
# HTTPCACHE_DIR = 'httpcache'
# HTTPCACHE_IGNORE_HTTP_CODES = []
# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
spiders/example_juejin/juejin/spiders/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
spiders/example_juejin/juejin/spiders/juejin_spider.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-
import scrapy
from juejin.items import JuejinItem


class JuejinSpiderSpider(scrapy.Spider):
    name = 'juejin_spider'
    allowed_domains = ['juejin.com']
    start_urls = ['https://juejin.im/search?query=celery']

    def parse(self, response):
        for item in response.css('ul.main-list > li.item'):
            yield JuejinItem(
                title=item.css('.title span').extract_first(),
                link=item.css('a::attr("href")').extract_first(),
                like=item.css('.like .count::text').extract_first(),
            )
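To try the example spider outside Crawlab, it can be driven with Scrapy's own process runner; this sketch assumes it is launched from the spiders/example_juejin directory so that scrapy.cfg and the juejin package are importable:

```python
# Run the example spider standalone (assumes cwd is spiders/example_juejin).
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from juejin.spiders.juejin_spider import JuejinSpiderSpider

process = CrawlerProcess(get_project_settings())
process.crawl(JuejinSpiderSpider)
process.start()  # blocks until the crawl finishes
```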
spiders/example_juejin/scrapy.cfg (new file, 11 lines)
@@ -0,0 +1,11 @@
# Automatically created by: scrapy startproject
#
# For more information about the [deploy] section see:
# https://scrapyd.readthedocs.org/en/latest/deploy.html

[settings]
default = juejin.settings

[deploy]
#url = http://localhost:6800/
project = juejin