// crawlab/frontend/src/i18n/zh.js
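// Usage sketch (illustrative assumption, not necessarily Crawlab's actual bootstrap code):
// locale dictionaries like this one are normally registered with vue-i18n and looked up
// by their English keys. The file names and options below are placeholders for illustration.
//
//   import Vue from 'vue'
//   import VueI18n from 'vue-i18n'
//   import zh from './zh'        // this file
//   import en from './en'        // hypothetical English counterpart
//
//   Vue.use(VueI18n)
//   const i18n = new VueI18n({
//     locale: 'zh',              // active locale
//     fallbackLocale: 'en',      // fall back when a key is missing
//     messages: { zh, en }
//   })
//
//   // Inside a component: this.$t('Spider Detail') === '爬虫详情'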
export default {
// 菜单
'Home': '主页',
'Nodes': '节点',
'Node Detail': '节点详情',
'Spiders': '爬虫',
'Spider Detail': '爬虫详情',
'Task': '任务',
'Tasks': '任务',
'Task Detail': '任务详情',
'Schedules': '定时任务',
'Deploys': '部署',
'Sites': '网站',
'Setting': '设置',
'Project': '项目',
// 标签
'Overview': '概览',
'Files': '文件',
'Deployed Spiders': '已部署爬虫',
'Log': '日志',
'Results': '结果',
'Environment': '环境',
'Analytics': '分析',
'Rules': '规则',
'Config': '配置',
// 选择
'Spider': '爬虫',
// 块标题
'Latest Tasks': '最近任务',
'Latest Deploys': '最近部署',
// 任务状态
Pending: '待定',
Running: '进行中',
Finished: '已完成',
Error: '错误',
NA: '未知',
Cancelled: '已取消',
Abnormal: '异常',
// 操作
Add: '添加',
Create: '创建',
Run: '运行',
Deploy: '部署',
Save: '保存',
Cancel: '取消',
Import: '导入',
Submit: '提交',
'Import Spiders': '导入爬虫',
'Deploy All': '部署所有爬虫',
'Refresh': '刷新',
'View': '查看',
'Edit': '编辑',
'Remove': '删除',
'Confirm': '确认',
'Stop': '停止',
'Preview': '预览',
'Extract Fields': '提取字段',
'Download': '下载',
'Download CSV': '下载CSV',
'Upload Zip File': '上传Zip文件',
'Upload': '上传',
'Item Threshold': '子项阈值',
'Back': '返回',
'New File': '新建文件',
'Rename': '重命名',
'Install': '安装',
'Uninstall': '卸载',
'Create Directory': '新建目录',
'Create File': '新建文件',
'Add Node': '添加节点',
'Add Project': '添加项目',
'Sync': '同步',
'Auto Sync': '自动同步',
'Sync Frequency': '同步频率',
'Reset': '重置',
'Copy': '复制',
'Upgrade': '版本升级',
'Ok': '确定',
// 主页
'Total Tasks': '总任务数',
'Active Nodes': '在线节点',
'Total Deploys': '总部署数',
'Daily New Tasks': '每日新增任务数',
// 节点
'Node Info': '节点信息',
'Node Name': '节点名称',
'Node IP': '节点IP',
'Node MAC': '节点MAC',
'Node Port': '节点端口',
'Description': '描述',
'All Nodes': '所有节点',
'Node List': '节点列表',
'Network': '拓扑图',
'Node Network': '节点拓扑图',
'Master': '主节点',
'Worker': '工作节点',
'Installation': '安装',
'Search Dependencies': '搜索依赖',
// 节点列表
'IP': 'IP地址',
'Port': '端口',
// 节点状态
Online: '在线',
Offline: '离线',
Unavailable: '未知',
// 爬虫
'Spider Info': '爬虫信息',
'Spider ID': '爬虫ID',
'Spider Name': '爬虫名称',
'Source Folder': '代码目录',
'Execute Command': '执行命令',
'Results Collection': '结果集',
'Spider Type': '爬虫类型',
'Language': '语言',
'Schedule Enabled': '是否开启定时任务',
'Schedule Cron': '定时任务',
'Variable': '变量',
'Value': '值',
'Add Environment Variables': '添加环境变量',
'Add Spider': '添加爬虫',
'Add Configurable Spider': '添加可配置爬虫',
'Add Customized Spider': '添加自定义爬虫',
'Add Field': '添加字段',
'Last 7-Day Tasks': '最近7天任务数',
'Last 5-Run Errors': '最近5次运行错误数',
'30-Day Tasks': '最近30天任务数',
'30-Day Results': '最近30天结果数',
'Success Rate': '运行成功率',
'Avg Duration (sec)': '平均运行时长(秒)',
'Tasks by Status': '分状态任务数',
'Tasks by Node': '分节点任务数',
'Daily Tasks': '每日任务数',
'Daily Avg Duration (sec)': '每日平均运行时长(秒)',
'Configurable Spider': '可配置爬虫',
'Customized Spider': '自定义爬虫',
'Configurable': '可配置',
'Customized': '自定义',
'configurable': '可配置',
'customized': '自定义',
'Text': '文本',
'Attribute': '属性',
'Field Name': '字段名称',
'Query Type': '查询类别',
'Query': '查询',
'Extract Type': '提取类别',
'CSS Selector': 'CSS选择器',
'CSS': 'CSS',
'XPath': 'XPath',
'Crawl Type': '抓取类别',
'List Only': '仅列表',
'Detail Only': '仅详情页',
'List + Detail': '列表+详情页',
'Start URL': '开始URL',
'Item Selector': '列表项选择器',
'Item Selector Type': '列表项选择器类别',
'Pagination Selector': '分页选择器',
'Pagination Selector Type': '分页项选择器类别',
'Preview Results': '预览结果',
'Obey robots.txt': '遵守Robots协议',
'List Page Fields': '列表页字段',
'Detail Page Fields': '详情页字段',
'Detail Page URL': '详情页URL',
'All': '全部',
'Stages': '阶段',
'Process': '流程',
'Stage Process': '流程图',
'Stage Name': '阶段名称',
'Start Stage': '开始阶段',
'Engine': '引擎',
'Selector Type': '选择器类别',
'Selector': '选择器',
'Is Attribute': '是否为属性',
'Next Stage': '下一阶段',
'No Next Stage': '没有下一阶段',
'Fields': '字段',
'Stage': '阶段',
'Is List': '是否为列表',
'List': '列表',
'Pagination': '分页',
'Settings': '设置',
'Display Name': '显示名称',
'Template': '模版',
'Is Scrapy': '是否为 Scrapy',
'Scrapy Spider': 'Scrapy 爬虫',
'Scrapy Spiders': 'Scrapy 爬虫',
'Scrapy Log Level': 'Scrapy 日志等级',
'Parameter Name': '参数名',
'Parameter Value': '参数值',
'Parameter Type': '参数类别',
'Other': '其他',
'Scrapy Config': 'Scrapy 配置',
'Scrapy Settings': 'Scrapy 设置',
'Variable Name': '变量名',
'Variable Type': '变量类型',
'Variable Value': '变量值',
'Parameter Edit': '参数编辑',
'Add Scrapy Spider': '添加 Scrapy 爬虫',
'Is Git': '是否为 Git',
'Git Settings': 'Git 设置',
'Git URL': 'Git URL',
'Git Branch': 'Git 分支',
'Git Username': 'Git 用户名',
'Git Password': 'Git 密码',
'Has Credential': '需要验证',
'SSH Public Key': 'SSH 公钥',
'Is Long Task': '是否为长任务',
'Long Task': '长任务',
'Running Task Count': '运行中的任务数',
'Running Tasks': '运行中的任务',
'Item Name': 'Item 名称',
'Add Item': '添加 Item',
'Add Variable': '添加变量',
'Copy Spider': '复制爬虫',
'New Spider Name': '新爬虫名称',
// 爬虫列表
'Name': '名称',
'Last Run': '上次运行',
'Action': '操作',
'No command line': '没有执行命令',
'Last Status': '上次运行状态',
'Remark': '备注',
// 任务
'Task Info': '任务信息',
'Task ID': '任务ID',
'Status': '状态',
'Log File Path': '日志文件路径',
'Create Timestamp': '创建时间',
'Finish Timestamp': '完成时间',
'Duration (sec)': '用时(秒)',
'Error Message': '错误信息',
'Results Count': '结果数',
'Average Results Count per Second': '抓取速度(个/秒)',
'Wait Duration (sec)': '等待时长(秒)',
'Runtime Duration (sec)': '运行时长(秒)',
'Total Duration (sec)': '总时长(秒)',
'Run Type': '运行类型',
'Random': '随机',
'Selected Nodes': '指定节点',
'Search Log': '搜索日志',
'Auto-Scroll': '自动滚动',
'Updating log...': '正在更新日志...',
'Error Count': '错误数',
'Log with errors': '日志错误',
'Empty results': '空结果',
'Navigate to Spider': '导航到爬虫',
'Navigate to Node': '导航到节点',
// 任务列表
'Node': '节点',
'Create Time': '创建时间',
'Start Time': '开始时间',
'Finish Time': '结束时间',
'Update Time': '更新时间',
// 部署
'Time': '时间',
// 项目
'All Tags': '全部标签',
'Project Name': '项目名称',
'Project Description': '项目描述',
'Tags': '标签',
'Enter Tags': '输入标签',
'No Project': '无项目',
'All Projects': '所有项目',
// 定时任务
'Schedule Name': '定时任务名称',
'Schedule Description': '定时任务描述',
'Parameters': '参数',
'Add Schedule': '添加定时任务',
'stop': '暂停',
'running': '运行',
'error': '错误',
'Not Found Node': '节点配置错误',
'Not Found Spider': '爬虫配置错误',
'[minute] [hour] [day] [month] [day of week]': '[分] [时] [日] [月] [星期几]',
'Enable/Disable': '启用/禁用',
'Cron': 'Cron',
'Cron Expression': 'Cron 表达式',
'Cron expression is invalid': 'Cron 表达式不正确',
// 网站
'Site': '网站',
'Rank': '排名',
'Domain': '域名',
'Main Category': '主类别',
'Category': '类别',
'Select': '请选择',
'Select Main Category': '请选择主类别',
'Select Category': '请选择类别',
'Spider Count': '爬虫数',
'Robots Protocol': 'Robots 协议',
'Home Page Response Time (sec)': '首页响应时间(秒)',
'Home Page Response Status Code': '首页响应状态码',
// 用户
'Super Admin': '超级管理员',
// 文件
'Choose Folder': '选择文件夹',
'File': '文件',
'Folder': '文件夹',
'Directory': '目录',
// 导入
'Import Spider': '导入爬虫',
'Source URL': '来源URL',
'Source Type': '来源类别',
// 搜索
Search: '搜索',
// 下拉框
User: '用户',
Logout: '退出登录',
Documentation: '文档',
// 变量类型
'String': '字符串',
'Number': '数字',
'Boolean': '布尔值',
'Array/List': '数组/列表',
'Object/Dict': '对象/字典',
// 选择
'Yes': '是',
'No': '否',
// 系统
'OS': '操作系统',
'ARCH': '操作架构',
'Number of CPU': 'CPU数',
'Executables': '执行文件',
'Latest Version': '最新版本',
'Version': '版本',
'Installed': '已安装',
'Not Installed': '未安装',
'Installing': '正在安装',
'Install All': '安装全部',
'Other language installing': '其他语言正在安装',
'This language is not installed yet.': '语言还未安装',
'Languages': '语言',
'Dependencies': '依赖',
'Install on All Nodes': '安装在所有节点',
// 弹出框
'Notification': '提示',
'Are you sure to delete this node?': '你确定要删除该节点?',
'Are you sure to run this spider?': '你确定要运行该爬虫?',
'Are you sure to delete this file/directory?': '你确定要删除该文件/文件夹?',
'Added spider successfully': '成功添加爬虫',
'Uploaded spider files successfully': '成功上传爬虫文件',
'Node info has been saved successfully': '节点信息已成功保存',
'A task has been scheduled successfully': '已经成功派发一个任务',
'Are you sure to delete this spider?': '你确定要删除该爬虫?',
'Are you sure to delete this user?': '你确定要删除该用户?',
'Spider info has been saved successfully': '爬虫信息已成功保存',
'Do you allow us to collect some statistics to improve Crawlab?': '您允许我们收集统计数据以更好地优化Crawlab吗?',
'Saved file successfully': '成功保存文件',
'An error happened when fetching the data': '请求数据时出错',
'Error when logging in (Please read documentation Q&A)': '登录时出错，请查看文档 Q&A',
'Please enter the correct username': '请输入正确用户名',
'Password length should be no shorter than 5': '密码长度不能小于5',
'Two passwords must be the same': '两个密码必须要一致',
'username already exists': '用户名已存在',
'Deleted successfully': '成功删除',
'Saved successfully': '成功保存',
'Renamed successfully': '成功重命名',
'You can click "Add" to create an empty spider and upload files later.': '您可以点击"添加"按钮创建空的爬虫，之后再上传文件',
'OR, you can also click "Upload" and upload a zip file containing your spider project.': '或者您也可以点击"上传"按钮并上传一个包含爬虫项目的 zip 文件',
'NOTE: When uploading a zip file, please zip your spider files from the ROOT DIRECTORY.': '注意: 上传 zip 文件时请从 根目录 下开始压缩爬虫文件',
'English': 'English',
'Are you sure to delete the schedule task?': '确定删除定时任务?',
' is not installed, do you want to install it?': ' 还没有安装，您是否打算安装它?',
'Disclaimer': '免责声明',
'Please search dependencies': '请搜索依赖',
'No Data': '暂无数据',
'Show installed': '查看已安装',
'Installing dependency successful': '安装依赖成功',
'Installing dependency failed': '安装依赖失败',
'You have successfully installed a dependency: ': '您已成功安装依赖: ',
'The dependency installation is unsuccessful: ': '安装依赖失败: ',
'Uninstalling dependency successful': '卸载依赖成功',
'Uninstalling dependency failed': '卸载依赖失败',
'You have successfully uninstalled a dependency: ': '您已成功卸载依赖: ',
'The dependency uninstallation is unsuccessful: ': '卸载依赖失败: ',
'Installing language successful': '安装语言成功',
'Installing language failed': '安装语言失败',
'You have successfully installed a language: ': '您已成功安装语言: ',
'The language installation is unsuccessful: ': '安装语言失败: ',
'Enabling the schedule successful': '启用定时任务成功',
'Disabling the schedule successful': '禁用定时任务成功',
'Enabling the schedule unsuccessful': '启用定时任务失败',
'Disabling the schedule unsuccessful': '禁用定时任务失败',
'The schedule has been removed': '已删除定时任务',
'The schedule has been added': '已添加定时任务',
'The schedule has been saved': '已保存定时任务',
'Email format invalid': '邮箱地址格式不正确',
'Please select a file or click the add button on the left.': '请在左侧选择一个文件或点击添加按钮.',
'New Directory': '新建目录',
'Enter new directory name': '输入新目录名称',
'New directory name': '新目录名称',
'Enter new file name': '输入新文件名称',
'New file name': '新文件名称',
'Release Note': '发布记录',
'How to Upgrade': '升级方式',
'Release': '发布',
'Add Wechat to join discussion group': '添加微信 tikazyq1 加入交流群',
// 登录
'Sign in': '登录',
'Sign-in': '登录',
'Sign out': '退出登录',
'Sign-out': '退出登录',
'Sign up': '注册',
'Sign-up': '注册',
'Forgot Password': '忘记密码',
'Has Account': '已有账号',
'New to Crawlab': 'Crawlab新用户',
'Initial Username/Password': '初始用户名/密码',
'Username': '用户名',
'Password': '密码',
'Confirm Password': '确认密码',
'normal': '普通用户',
'admin': '管理用户',
'Role': '角色',
'Edit User': '更改用户',
'Users': '用户',
'Email': '邮箱',
'Optional': '可选',
// 设置
'Notification Trigger Timing': '消息通知触发时机',
'On Task End': '当任务结束',
'On Task Error': '当任务发生错误',
'Never': '从不',
'DingTalk Robot Webhook': '钉钉机器人 Webhook',
'Wechat Robot Webhook': '微信机器人 Webhook',
'Password Settings': '密码设置',
'Notifications': '消息通知',
'Global Variable': '全局变量',
'Add Global Variable': '新增全局变量',
'Are you sure to delete this global variable': '确定删除该全局变量',
'Key': '键',
'Allow Sending Statistics': '允许发送统计信息',
'General': '通用',
'Enable Tutorial': '启用教程',
// 全局
'Related Documentation': '相关文档',
'Click to view related Documentation': '点击查看相关文档',
// 其他
tagsView: {
closeOthers: '关闭其他',
close: '关闭',
refresh: '刷新',
closeAll: '关闭所有'
},
nodeList: {
type: '节点类型'
},
schedules: {
cron: 'Cron',
add_cron: '生成Cron',
// Cron Format: [second] [minute] [hour] [day of month] [month] [day of week]
cron_format: 'Cron 格式: [秒] [分] [小时] [日] [月] [周]'
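// Illustrative examples only (assuming the 6-field format documented above):
// '0 0 */6 * * *'  -> at second 0, minute 0 of every 6th hour
// '0 30 2 * * 1'   -> at 02:30:00 every Monday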
},
// 内容
addNodeInstruction: `
您不能在 Crawlab 的 Web 界面直接添加节点。
添加节点的方式非常简单,您只需要在目标机器上运行一个 Crawlab 服务就可以了。
#### Docker 部署
如果您是用 Docker 启动 Crawlab可以在目标机器上运行一个新的 \`worker\` 容器,或者在 \`docker-compose.yml\` 中添加 \`worker\` 服务。
\`\`\`bash
docker run -d --restart always --name crawlab_worker \\
-e CRAWLAB_SERVER_MASTER=N \\
-e CRAWLAB_MONGO_HOST=xxx.xxx.xxx.xxx \\ # 保证连接的是同一个 MongoDB
-e CRAWLAB_REDIS_ADDRESS=xxx.xxx.xxx.xxx \\ # 保证连接的是同一个 Redis
tikazyq/crawlab:latest
\`\`\`
#### 直接部署
如果您是用直接部署,只需要在目标机器上启动一个后端服务,请参考 [直接部署文档](https://docs.crawlab.cn/Installation/Direct.html)。
更多信息,请参考 [官方文档](https://docs.crawlab.cn)。
`,
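// Sketch only: the docker-compose alternative mentioned in addNodeInstruction could look
// roughly like the service below. The image and environment variable names mirror the
// `docker run` example above; the xxx host addresses are placeholders you must replace.
//
//   worker:
//     image: tikazyq/crawlab:latest
//     restart: always
//     environment:
//       CRAWLAB_SERVER_MASTER: "N"               # run as a worker, not the master
//       CRAWLAB_MONGO_HOST: "xxx.xxx.xxx.xxx"    # same MongoDB as the master
//       CRAWLAB_REDIS_ADDRESS: "xxx.xxx.xxx.xxx" # same Redis as the master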
// 教程
'Skip': '跳过',
'Previous': '上一步',
'Next': '下一步',
'Finish': '结束',
'Click to add a new spider.<br><br>You can also add a <strong>Customized Spider</strong> through <a href="https://docs.crawlab.cn/Usage/SDK/CLI.html" target="_blank" style="color: #409EFF">CLI Tool</a>.': '点击并添加爬虫<br><br>您也可以通过 <a href="https://docs.crawlab.cn/Usage/SDK/CLI.html" target="_blank" style="color: #409EFF">CLI 工具</a> 添加<strong>自定义爬虫</strong>',
'You can view your created spiders here.<br>Click a table row to view <strong>spider details</strong>.': '您可以查看创建的爬虫<br>点击行来查看<strong>爬虫详情</strong>',
'View a list of <strong>Configurable Spiders</strong>': '查看<strong>可配置爬虫</strong>列表',
'View a list of <strong>Customized Spiders</strong>': '查看<strong>自定义爬虫</strong>列表',
'<strong>Customized Spider</strong> is a highly customized spider, which is able to run on any programming language and any web crawler framework.': '<strong>自定义爬虫</strong>是高度自定义化的爬虫能够运行任何编程语言和爬虫框架',
'<strong>Configurable Spider</strong> is a spider defined by config data, aimed at streamlining spider development and improving dev efficiency.': '<strong>可配置爬虫</strong>被配置数据所定义，旨在将爬虫开发流程化以及提高爬虫开发效率',
'Unique identifier for the spider': '爬虫的唯一识别符',
'How the spider is displayed on Crawlab': '爬虫在 Crawlab 上的展示名称',
'A shell command to be executed when the spider is triggered to run (only available for <strong>Customized Spider</strong>': '当爬虫被触发时执行的一行 Shell 命令（仅<strong>自定义爬虫</strong>有效）',
'Where the results are stored in the database': '抓取结果在数据库中储存的位置',
'Upload a zip file containing all spider files to create the spider (only available for <strong>Customized Spider</strong>)': '上传一个包含所有爬虫文件的 zip 文件然后创建爬虫（仅<strong>自定义爬虫</strong>有效）',
'The spider template to create from (only available for <strong>Configurable Spider</strong>)': '创建爬虫时引用的模版（仅<strong>可配置爬虫</strong>有效）',
'Click to confirm to add the spider': '点击并确认添加爬虫',
'You can switch to each section of the spider detail.': '您可以切换到爬虫详情的每一个部分',
'You can switch to different spider using this selector.': '您可以通过这个选择器切换不同的爬虫',
'You can view latest tasks for this spider and click each row to view task detail.': '您可以查看最近的爬虫任务以及点击行来查看任务详情',
'You can edit the detail info for this spider.': '您可以编辑爬虫详情信息',
'Here you can action on the spider, including running a task, uploading a zip file and save the spider info.': '这里您可以对爬虫进行操作，包括运行爬虫任务、上传 zip 文件以及保存爬虫信息',
'File navigation panel.<br><br>You can right click on <br>each item to create or delete<br> a file/directory.': '文件导航栏<br><br>您可以右键点击一个元素<br>来添加或删除文件/文件夹',
'Click to add a file or directory<br> on the root directory.': '点击并添加一个文件<br>或文件夹',
'You can edit, save, rename<br> and delete the selected file <br>in this box.': '在这个栏位中您可以<br>编辑、保存、重命名、<br>删除所选择的文件',
'Here you can add environment variables that will be passed to the spider program when running a task.': '这里您可以添加环境变量，这些环境变量会被传入运行的爬虫程序中',
'You can add, edit and delete schedules (cron jobs) for the spider.': '您可以添加、修改、删除爬虫的定时任务',
'You can switch to each section of configurable spider.': '您可以切换到可配置爬虫的每一个部分',
'Here is the starting URL of the spider.': '这里是爬虫的起始URL',
'Here is the starting stage of the spider.<br><br>A <strong>Stage</strong> is basically a callback in the Scrapy spider.': '这里是爬虫的起始阶段<br><br><strong>阶段</strong>就是 Scrapy 爬虫中的回调函数',
'You can run a spider task.<br><br>Spider will be automatically saved when clicking on this button.': '您可以运行爬虫任务<br><br>点击该按钮会自动保存爬虫',
'Add/duplicate/delete a stage.': '添加/复制/删除阶段',
'Add/duplicate/delete an extract field in the stage.': '添加/复制/删除该阶段下的抓取字段',
'You can decide whether this is a list page.<br><br>Click on the CSS/XPath tag to enter the selector expression for list items.<br>For example, "<code>ul > li</code>"': '您可以决定这是否为一个列表页<br><br>点击 CSS/XPath 标签来输入列表元素的选择器表达式<br>例如 "<code>ul > li</code>"',
'You can decide whether this is a list page with pagination.<br><br>Click on the CSS/XPath tag to enter the selector expression for the pagination.<br>For example, "<code>a.next</code>"': '您可以决定这是否为一个含分页的列表页<br><br>点击 CSS/XPath 标签来输入分页的选择器表达式<br>例如 "<code>a.next</code>"',
'You should enter necessary information for all fields in the stage.': '您应该输入该阶段下所有字段的信息',
'If you have multiple stages, e.g. list page + detail page, you should select the next stage in the detail link\'s field.': '如果您有多个阶段，例如列表页+详情页，您应该在详情页链接字段中选择下一个阶段',
'You can view the<br> visualization of the stage<br> workflow.': '您可以查看阶段工作流的<br>可视化界面',
'You can add the settings here, which will be loaded in the Scrapy\'s <code>settings.py</code> file.<br><br>JSON and Array data are supported.': '您可以在这里添加设置它们会在 Scrapy 中的 <code>settings.py</code> 中被加载<br><br>JSON 和数组都支持',
'You can edit the <code>Spiderfile</code> here.<br><br>For more information, please refer to the <a href="https://docs.crawlab.cn/Usage/Spider/ConfigurableSpider.html" target="_blank" style="color: #409EFF">Documentation (Chinese)</a>.': '您可以在这里编辑 <code>Spiderfile</code><br><br>更多信息, 请参考 <a href="https://docs.crawlab.cn/Usage/Spider/ConfigurableSpider.html" target="_blank" style="color: #409EFF">文档</a>.',
'You can filter tasks from this area.': '您可以在这个区域筛选任务',
'This is a list of spider tasks executed sorted in a time descending order.': '这是执行过的爬虫任务的列表，按时间降序排列',
'Click the row to or the view button to view the task detail.': '点击行或查看按钮来查看任务详情',
'Tick and select the tasks you would like to delete in batches.': '勾选您想批量删除的任务',
'Click this button to delete selected tasks.': '点击并删除勾选的任务',
'This is the info of the task detail.': '这是任务详情信息',
'This is the spider info of the task.': '这是任务的爬虫信息',
'You can click to view the spider detail for the task.': '您可以点击查看该任务的爬虫详情',
'This is the node info of the task.': '这是任务的节点信息',
'You can click to view the node detail for the task.': '您可以点击查看该任务的节点详情',
'Here you can view the log<br> details for the task. The<br> log is automatically updated.': '这里您可以查看该任务<br>的日志详情日志是<br>自动更新的',
'Here you can view the results scraped by the spider.<br><br><strong>Note:</strong> If you find your results here are empty, please refer to the <a href="https://docs.crawlab.cn/Integration/" target="_blank" style="color: #409EFF">Documentation (Chinese)</a> about how to integrate your spider into Crawlab.': '这里您可以查看爬虫抓取下来的结果<br><br><strong>注意:</strong> 如果这里结果是空的，请参考 <a href="https://docs.crawlab.cn/Integration/" target="_blank" style="color: #409EFF">相关文档</a> 来集成您的爬虫到 Crawlab',
'You can download your results as a CSV file by clicking this button.': '您可以点击下载结果为 CSV 文件',
'Switch between different nodes.': '在节点间切换',
'You can view the latest executed spider tasks.': '您可以查看最近执行过的爬虫任务',
'This is the detailed node info.': '这是节点详情',
'Here you can install<br> dependencies and modules<br> that are required<br> in your spiders.': '这里您可以安装您爬虫中<br>需要的依赖或模块',
'You can search dependencies in the search box and install them by clicking the "Install" button below.': '您可以在搜索框中搜索依赖并点击下面的"安装"按钮来进行安装',
'You should fill the form before adding the new schedule.': '在添加新定时任务前您需要填写这个表单',
'The name of the schedule': '定时任务名称',
'The type of how to run the task.<br><br>Please refer to the <a href="https://docs.crawlab.cn/Usage/Spider/Run.html" target="_blank" style="color: #409EFF">Documentation (Chinese)</a> for detailed explanation for the options.<br><br>Let\'s select <strong>Selected Nodes</strong> for example.': '表示以哪种方式运行任务<br><br>请参考 <a href="https://docs.crawlab.cn/Usage/Spider/Run.html" target="_blank" style="color: #409EFF">文档</a> 了解选项的详细解释<br><br>让我们选择 <strong>指定节点</strong> 这个选项',
'The spider to run': '运行的爬虫',
'<strong>Cron</strong> expression for the schedule.<br><br>If you are not sure what a cron expression is, please refer to this <a href="https://baike.baidu.com/item/crontab/8819388" target="_blank" style="color: #409EFF">Article</a>.': '定时任务的 <strong>Cron</strong> 表达式<br><br>如果您不清楚什么是 Cron 表达式请参考这篇 <a href="https://baike.baidu.com/item/crontab/8819388" target="_blank" style="color: #409EFF">文章英文</a>.',
'You can select the correct options in the cron config box to configure the cron expression.': '您可以在 Cron 配置栏里选择正确的选项来配置 Cron 表达式',
'The parameters which will be passed into the spider program.': '将被传入爬虫程序里的参数',
'The description for the schedule': '定时任务的描述',
'Once you have filled all fields, click this button to submit.': '当您填完所有字段，请点击这个按钮来提交定时任务',
'Here you can set your general settings.': '这里您可以设置您的通用设置',
'In this tab you can configure your notification settings.': '在这个标签中您可以<br>配置您的消息通知配置',
'Here you can add/edit/delete global environment variables which will be passed into your spider programs.': '这里您可以添加/修改/删除全局环境变量，它们会被传入爬虫程序中',
'You are running on a mobile device, which is not optimized yet. Please try with a laptop or desktop.': '您正在没有优化过的移动端上浏览，我们建议您用电脑来访问',
'Git has been synchronized successfully': 'Git 已经成功同步',
'Git has been reset successfully': 'Git 已经成功重置',
'This would delete all files of the spider. Are you sure to continue?': '重置将删除该爬虫所有文件，您希望继续吗?',
'SSH Public Key is copied to the clipboard': 'SSH 公钥已复制到剪切板',
'Removed successfully': '已成功删除',
'Are you sure to delete selected items?': '您是否确认删除所选项?',
'Are you sure to stop selected items?': '您是否确认停止所选项?',
'Sent signals to cancel selected tasks': '已经向所选任务发送取消任务信号',
'Copied successfully': '已成功复制',
// 其他
'Star crawlab-team/crawlab on GitHub': '在 GitHub 上为 Crawlab 加星吧'
}