diff --git a/backend/conf/config.yml b/backend/conf/config.yml
index 3805762a..a5e0b23b 100644
--- a/backend/conf/config.yml
+++ b/backend/conf/config.yml
@@ -20,7 +20,7 @@ log:
server:
host: 0.0.0.0
port: 8000
- master: "N"
+ master: "Y"
secret: "crawlab"
register:
# MAC address or IP address; if an IP is used, it must be specified manually
diff --git a/backend/config/config.go b/backend/config/config.go
index 4d83c0f7..e4c4616c 100644
--- a/backend/config/config.go
+++ b/backend/config/config.go
@@ -28,7 +28,7 @@ func (c *Config) Init() error {
}
viper.SetConfigType("yaml") // set the config file format to YAML
viper.AutomaticEnv() // read matching environment variables
- viper.SetEnvPrefix("CRAWLAB") // the environment variable prefix is APISERVER
+ viper.SetEnvPrefix("CRAWLAB") // the environment variable prefix is CRAWLAB
replacer := strings.NewReplacer(".", "_")
viper.SetEnvKeyReplacer(replacer)
if err := viper.ReadInConfig(); err != nil { // viper parses the config file
diff --git a/backend/constants/anchor.go b/backend/constants/anchor.go
new file mode 100644
index 00000000..f462135f
--- /dev/null
+++ b/backend/constants/anchor.go
@@ -0,0 +1,8 @@
+package constants
+
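+// Anchor names correspond to the ###NAME### placeholders in the Scrapy template files
+// under backend/template/scrapy; the code generator replaces them with generated code.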
+const (
+ AnchorStartStage = "START_STAGE"
+ AnchorStartUrl = "START_URL"
+ AnchorItems = "ITEMS"
+ AnchorParsers = "PARSERS"
+)
diff --git a/backend/constants/config_spider.go b/backend/constants/config_spider.go
new file mode 100644
index 00000000..c29624dc
--- /dev/null
+++ b/backend/constants/config_spider.go
@@ -0,0 +1,6 @@
+package constants
+
+const (
+ EngineScrapy = "scrapy"
+ EngineColly = "colly"
+)
diff --git a/backend/constants/scrapy.go b/backend/constants/scrapy.go
new file mode 100644
index 00000000..bc82508f
--- /dev/null
+++ b/backend/constants/scrapy.go
@@ -0,0 +1,5 @@
+package constants
+
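+// Comma-separated lists of names reserved by the Scrapy generator; ValidateSpiderfile
+// rejects stage and field names found in these strings (e.g. a field named "task_id").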
+const ScrapyProtectedStageNames = ""
+
+const ScrapyProtectedFieldNames = "_id,task_id,ts"
diff --git a/backend/constants/spider.go b/backend/constants/spider.go
index b4b7f65e..5119aa67 100644
--- a/backend/constants/spider.go
+++ b/backend/constants/spider.go
@@ -3,4 +3,5 @@ package constants
const (
Customized = "customized"
Configurable = "configurable"
+ Plugin = "plugin"
)
diff --git a/backend/database/redis.go b/backend/database/redis.go
index 348a74bb..bffc40be 100644
--- a/backend/database/redis.go
+++ b/backend/database/redis.go
@@ -102,7 +102,7 @@ func NewRedisPool() *redis.Pool {
return redis.DialURL(url,
redis.DialConnectTimeout(time.Second*10),
redis.DialReadTimeout(time.Second*10),
- redis.DialWriteTimeout(time.Second*10),
+ redis.DialWriteTimeout(time.Second*15),
)
},
TestOnBorrow: func(c redis.Conn, t time.Time) error {
diff --git a/backend/entity/config_spider.go b/backend/entity/config_spider.go
new file mode 100644
index 00000000..5e0fe1e1
--- /dev/null
+++ b/backend/entity/config_spider.go
@@ -0,0 +1,25 @@
+package entity
+
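+// Field describes a single field extracted in a stage: its name, CSS/XPath selector,
+// an optional attribute, and an optional next stage to follow with the extracted value.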
+type Field struct {
+ Name string `yaml:"name" json:"name"`
+ Css string `yaml:"css" json:"css"`
+ Xpath string `yaml:"xpath" json:"xpath"`
+ Attr string `yaml:"attr" json:"attr"`
+ NextStage string `yaml:"next_stage" json:"next_stage"`
+}
+
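+// Stage describes one parsing stage: whether it parses a list, the list and
+// pagination selectors, and the fields to extract.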
+type Stage struct {
+ IsList bool `yaml:"is_list" json:"is_list"`
+ ListCss string `yaml:"list_css" json:"list_css"`
+ PageCss string `yaml:"page_css" json:"page_css"`
+ PageAttr string `yaml:"page_attr" json:"page_attr"`
+ Fields []Field `yaml:"fields" json:"fields"`
+}
+
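+// ConfigSpiderData is the parsed contents of a Spiderfile
+// (see backend/template/Spiderfile for an example).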
+type ConfigSpiderData struct {
+ Version string `yaml:"version" json:"version"`
+ Engine string `yaml:"engine" json:"engine"`
+ StartUrl string `yaml:"start_url" json:"start_url"`
+ StartStage string `yaml:"start_stage" json:"start_stage"`
+ Stages map[string]Stage `yaml:"stages" json:"stages"`
+}
diff --git a/backend/go.mod b/backend/go.mod
index 428c2fd3..d59b6d41 100644
--- a/backend/go.mod
+++ b/backend/go.mod
@@ -17,4 +17,5 @@ require (
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337
github.com/spf13/viper v1.4.0
gopkg.in/go-playground/validator.v9 v9.29.1
+ gopkg.in/yaml.v2 v2.2.2
)
diff --git a/backend/main.go b/backend/main.go
index 2c92ab37..565c7892 100644
--- a/backend/main.go
+++ b/backend/main.go
@@ -110,7 +110,6 @@ func main() {
if model.IsMaster() {
// middlewares
app.Use(middlewares.CORSMiddleware())
- //app.Use(middlewares.AuthorizationMiddleware())
anonymousGroup := app.Group("/")
{
anonymousGroup.POST("/login", routes.Login) // user login
@@ -130,7 +129,7 @@ func main() {
// spiders
authGroup.GET("/spiders", routes.GetSpiderList) // spider list
authGroup.GET("/spiders/:id", routes.GetSpider) // spider detail
- authGroup.POST("/spiders", routes.PutSpider) // upload spider
+ authGroup.POST("/spiders", routes.PutSpider) // upload spider TODO: handler name is incorrect
authGroup.POST("/spiders/:id", routes.PostSpider) // update spider
authGroup.POST("/spiders/:id/publish", routes.PublishSpider) // publish spider
authGroup.DELETE("/spiders/:id", routes.DeleteSpider) // delete spider
@@ -140,6 +139,10 @@ func main() {
authGroup.GET("/spiders/:id/dir", routes.GetSpiderDir) // 爬虫目录
authGroup.GET("/spiders/:id/stats", routes.GetSpiderStats) // 爬虫统计数据
authGroup.GET("/spider/types", routes.GetSpiderTypes) // 爬虫类型
+ // 可配置爬虫
+ authGroup.PUT("/config_spiders", routes.PutConfigSpider) // 添加可配置爬虫
+ authGroup.POST("/config_spiders/:id", routes.PostConfigSpider) // 修改可配置爬虫
+ authGroup.POST("/config_spiders/:id/upload", routes.UploadConfigSpider) // 上传可配置爬虫
// 任务
authGroup.GET("/tasks", routes.GetTaskList) // 任务列表
authGroup.GET("/tasks/:id", routes.GetTask) // 任务详情
diff --git a/backend/model/config_spider/common.go b/backend/model/config_spider/common.go
new file mode 100644
index 00000000..c803755a
--- /dev/null
+++ b/backend/model/config_spider/common.go
@@ -0,0 +1,30 @@
+package config_spider
+
+import "crawlab/entity"
+
+func GetAllFields(data entity.ConfigSpiderData) []entity.Field {
+ var fields []entity.Field
+ for _, stage := range data.Stages {
+ for _, field := range stage.Fields {
+ fields = append(fields, field)
+ }
+ }
+ return fields
+}
+
+func GetStartStageName(data entity.ConfigSpiderData) string {
// if start_stage is set and exists in stages, return it
+ if data.StartStage != "" {
+ for stageName := range data.Stages {
+ if stageName == data.StartStage {
+ return data.StartStage
+ }
+ }
+ }
+
// otherwise return the first stage found (note: Go map iteration order is not deterministic)
+ for stageName := range data.Stages {
+ return stageName
+ }
+ return ""
+}
diff --git a/backend/model/config_spider/scrapy.go b/backend/model/config_spider/scrapy.go
new file mode 100644
index 00000000..7503b9bf
--- /dev/null
+++ b/backend/model/config_spider/scrapy.go
@@ -0,0 +1,228 @@
+package config_spider
+
+import (
+ "crawlab/constants"
+ "crawlab/entity"
+ "crawlab/model"
+ "crawlab/utils"
+ "errors"
+ "fmt"
+ "path/filepath"
+)
+
+type ScrapyGenerator struct {
+ Spider model.Spider
+ ConfigData entity.ConfigSpiderData
+}
+
+// Generate generates the spider files
+func (g ScrapyGenerator) Generate() error {
+ // generate items.py
+ if err := g.ProcessItems(); err != nil {
+ return err
+ }
+
+ // generate spider.py
+ if err := g.ProcessSpider(); err != nil {
+ return err
+ }
+ return nil
+}
+
+// ProcessItems generates items.py
+func (g ScrapyGenerator) ProcessItems() error {
+ // target file path
+ src := g.Spider.Src
+ filePath := filepath.Join(src, "config_spider", "items.py")
+
+ // get all fields
+ fields := g.GetAllFields()
+
+ // field name list (including default field names)
+ fieldNames := []string{
+ "_id",
+ "task_id",
+ "ts",
+ }
+
+ // append the configured fields
+ for _, field := range fields {
+ fieldNames = append(fieldNames, field.Name)
+ }
+
+ // convert the field names to Python code
+ str := ""
+ for _, fieldName := range fieldNames {
+ line := g.PadCode(fmt.Sprintf("%s = scrapy.Field()", fieldName), 1)
+ str += line
+ }
+
+ // replace the placeholder with the generated code
+ if err := utils.SetFileVariable(filePath, constants.AnchorItems, str); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// ProcessSpider generates spider.py
+func (g ScrapyGenerator) ProcessSpider() error {
+ // target file path
+ src := g.Spider.Src
+ filePath := filepath.Join(src, "config_spider", "spiders", "spider.py")
+
+ // replace start_stage
+ if err := utils.SetFileVariable(filePath, constants.AnchorStartStage, "parse_"+GetStartStageName(g.ConfigData)); err != nil {
+ return err
+ }
+
+ // replace start_url
+ if err := utils.SetFileVariable(filePath, constants.AnchorStartUrl, g.ConfigData.StartUrl); err != nil {
+ return err
+ }
+
+ // replace parsers
+ strParser := ""
+ for stageName, stage := range g.ConfigData.Stages {
+ stageStr := g.GetParserString(stageName, stage)
+ strParser += stageStr
+ }
+ if err := utils.SetFileVariable(filePath, constants.AnchorParsers, strParser); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (g ScrapyGenerator) GetParserString(stageName string, stage entity.Stage) string {
+ // build the parser function definition line
+ strDef := g.PadCode(fmt.Sprintf("def parse_%s(self, response):", stageName), 1)
+
+ strParse := ""
+ if stage.IsList {
+ // list parsing logic
+ strParse = g.GetListParserString(stageName, stage)
+ } else {
+ // non-list parsing logic
+ strParse = g.GetNonListParserString(stageName, stage)
+ }
+
+ // assemble the parser function
+ str := fmt.Sprintf(`%s%s`, strDef, strParse)
+
+ return str
+}
+
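+// PadCode indents a line of generated Python code by num levels (4 spaces per level) and appends a newline.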
+func (g ScrapyGenerator) PadCode(str string, num int) string {
+ res := ""
+ for i := 0; i < num; i++ {
+ res += " "
+ }
+ res += str
+ res += "\n"
+ return res
+}
+
+func (g ScrapyGenerator) GetNonListParserString(stageName string, stage entity.Stage) string {
+ str := ""
+
+ // get the item from response.meta or construct a new one
+ str += g.PadCode("item = Item() if response.meta.get('item') is None else response.meta.get('item')", 2)
+
+ // iterate over the field list
+ for _, f := range stage.Fields {
+ line := ""
+ if f.Attr == "" {
+ line += fmt.Sprintf(`item['%s'] = response.css('%s::text').extract_first()`, f.Name, f.Css)
+ } else {
+ line += fmt.Sprintf(`item['%s'] = response.css('%s::attr("%s")').extract_first()`, f.Name, f.Css, f.Attr)
+ }
+ line = g.PadCode(line, 2)
+ str += line
+ }
+
+ // next stage field
+ if f, err := g.GetNextStageField(stage); err == nil {
+ // if a next stage field is found, request the next stage's callback
+ str += g.PadCode(fmt.Sprintf(`yield scrapy.Request(url=get_real_url(response, item['%s']), callback=self.parse_%s, meta={'item': item})`, f.Name, f.NextStage), 2)
+ } else {
+ // if no next stage field is found, yield the item
+ str += g.PadCode(`yield item`, 2)
+ }
+
+ // append a trailing newline
+ str += g.PadCode("", 0)
+
+ return str
+}
+
+func (g ScrapyGenerator) GetListParserString(stageName string, stage entity.Stage) string {
+ str := ""
+
+ // get the previous stage's item
+ str += g.PadCode(`prev_item = response.meta.get('item')`, 2)
+
+ // loop over the list elements
+ str += g.PadCode(fmt.Sprintf(`for elem in response.css('%s'):`, stage.ListCss), 2)
+
+ // construct the item
+ str += g.PadCode(`item = Item()`, 3)
+
+ // iterate over the field list
+ for _, f := range stage.Fields {
+ line := ""
+ if f.Attr == "" {
+ line += fmt.Sprintf(`item['%s'] = elem.css('%s::text').extract_first()`, f.Name, f.Css)
+ } else {
+ line += fmt.Sprintf(`item['%s'] = elem.css('%s::attr("%s")').extract_first()`, f.Name, f.Css, f.Attr)
+ }
+ line = g.PadCode(line, 3)
+ str += line
+ }
+
+ // copy the previous stage's item values into the current item
+ str += g.PadCode(`if prev_item is not None:`, 3)
+ str += g.PadCode(`for key, value in prev_item.items():`, 4)
+ str += g.PadCode(`item[key] = value`, 5)
+
+ // next stage field
+ if f, err := g.GetNextStageField(stage); err == nil {
+ // if a next stage field is found, request the next stage's callback
+ str += g.PadCode(fmt.Sprintf(`yield scrapy.Request(url=get_real_url(response, item['%s']), callback=self.parse_%s, meta={'item': item})`, f.Name, f.NextStage), 3)
+ } else {
+ // if no next stage field is found, yield the item
+ str += g.PadCode(`yield item`, 3)
+ }
+
+ // pagination
+ if stage.PageCss != "" {
+ // pagination element attribute, defaults to href
+ pageAttr := "href"
+ if stage.PageAttr != "" {
+ pageAttr = stage.PageAttr
+ }
+
+ str += g.PadCode(fmt.Sprintf(`next_url = response.css('%s::attr("%s")').extract_first()`, stage.PageCss, pageAttr), 2)
+ str += g.PadCode(fmt.Sprintf(`yield scrapy.Request(url=get_real_url(response, next_url), callback=self.parse_%s, meta={'item': item})`, stageName), 2)
+ }
+
+ // append a trailing newline
+ str += g.PadCode("", 0)
+
+ return str
+}
+
+// GetAllFields returns all fields across all stages
+func (g ScrapyGenerator) GetAllFields() []entity.Field {
+ return GetAllFields(g.ConfigData)
+}
+
+// GetNextStageField returns the field of the stage that points to a next stage
+func (g ScrapyGenerator) GetNextStageField(stage entity.Stage) (entity.Field, error) {
+ for _, field := range stage.Fields {
+ if field.NextStage != "" {
+ return field, nil
+ }
+ }
+ return entity.Field{}, errors.New("cannot find next stage field")
+}
diff --git a/backend/model/spider.go b/backend/model/spider.go
index 5c2c92e8..53c5ab1f 100644
--- a/backend/model/spider.go
+++ b/backend/model/spider.go
@@ -25,6 +25,7 @@ type Spider struct {
Site string `json:"site" bson:"site"` // spider website
Envs []Env `json:"envs" bson:"envs"` // environment variables
Remark string `json:"remark" bson:"remark"` // remark
+
// customized spider
Src string `json:"src" bson:"src"` // source code location
Cmd string `json:"cmd" bson:"cmd"` // execution command
@@ -33,17 +34,7 @@ type Spider struct {
LastRunTs time.Time `json:"last_run_ts"` // last run timestamp
LastStatus string `json:"last_status"` // last run status
- // TODO: configurable spider
- //Fields []interface{} `json:"fields"`
- //DetailFields []interface{} `json:"detail_fields"`
- //CrawlType string `json:"crawl_type"`
- //StartUrl string `json:"start_url"`
- //UrlPattern string `json:"url_pattern"`
- //ItemSelector string `json:"item_selector"`
- //ItemSelectorType string `json:"item_selector_type"`
- //PaginationSelector string `json:"pagination_selector"`
- //PaginationSelectorType string `json:"pagination_selector_type"`
-
+ // timestamps
CreateTs time.Time `json:"create_ts" bson:"create_ts"`
UpdateTs time.Time `json:"update_ts" bson:"update_ts"`
}
@@ -98,13 +89,14 @@ func (spider *Spider) GetLastTask() (Task, error) {
return tasks[0], nil
}
+// delete spider
func (spider *Spider) Delete() error {
s, c := database.GetCol("spiders")
defer s.Close()
return c.RemoveId(spider.Id)
}
-// spider list
+// get spider list
func GetSpiderList(filter interface{}, skip int, limit int) ([]Spider, int, error) {
s, c := database.GetCol("spiders")
defer s.Close()
@@ -136,7 +128,7 @@ func GetSpiderList(filter interface{}, skip int, limit int) ([]Spider, int, erro
return spiders, count, nil
}
-// get spider
+// get spider (by FileId)
func GetSpiderByFileId(fileId bson.ObjectId) *Spider {
s, c := database.GetCol("spiders")
defer s.Close()
@@ -150,7 +142,7 @@ func GetSpiderByFileId(fileId bson.ObjectId) *Spider {
return result
}
-// get spider
+// get spider (by name)
func GetSpiderByName(name string) *Spider {
s, c := database.GetCol("spiders")
defer s.Close()
@@ -158,13 +150,13 @@ func GetSpiderByName(name string) *Spider {
var result *Spider
if err := c.Find(bson.M{"name": name}).One(&result); err != nil {
log.Errorf("get spider error: %s, spider_name: %s", err.Error(), name)
- debug.PrintStack()
+ //debug.PrintStack()
return nil
}
return result
}
-// get spider
+// get spider (by ID)
func GetSpider(id bson.ObjectId) (Spider, error) {
s, c := database.GetCol("spiders")
defer s.Close()
@@ -245,7 +237,7 @@ func RemoveAllSpider() error {
return nil
}
-// spider count
+// get spider count
func GetSpiderCount() (int, error) {
s, c := database.GetCol("spiders")
defer s.Close()
@@ -257,7 +249,7 @@ func GetSpiderCount() (int, error) {
return count, nil
}
-// spider types
+// get spider types
func GetSpiderTypes() ([]*entity.SpiderType, error) {
s, c := database.GetCol("spiders")
defer s.Close()
diff --git a/backend/routes/config_spider.go b/backend/routes/config_spider.go
new file mode 100644
index 00000000..6f4a2893
--- /dev/null
+++ b/backend/routes/config_spider.go
@@ -0,0 +1,238 @@
+package routes
+
+import (
+ "crawlab/constants"
+ "crawlab/database"
+ "crawlab/entity"
+ "crawlab/model"
+ "crawlab/services"
+ "crawlab/utils"
+ "fmt"
+ "github.com/apex/log"
+ "github.com/gin-gonic/gin"
+ "github.com/globalsign/mgo/bson"
+ uuid "github.com/satori/go.uuid"
+ "github.com/spf13/viper"
+ "gopkg.in/yaml.v2"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "path/filepath"
+ "runtime/debug"
+)
+
+// add a configurable spider
+func PutConfigSpider(c *gin.Context) {
+ var spider model.Spider
+ if err := c.ShouldBindJSON(&spider); err != nil {
+ HandleError(http.StatusBadRequest, c, err)
+ return
+ }
+
+ // the spider name must not be empty
+ if spider.Name == "" {
+ HandleErrorF(http.StatusBadRequest, c, "spider name should not be empty")
+ return
+ }
+
+ // check whether the spider already exists
+ if spider := model.GetSpiderByName(spider.Name); spider != nil {
+ HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("spider for '%s' already exists", spider.Name))
+ return
+ }
+
+ // set the spider type
+ spider.Type = constants.Configurable
+
+ // set FileId to the null ObjectId
+ spider.FileId = bson.ObjectIdHex(constants.ObjectIdNull)
+
+ // create the spider directory
+ spiderDir := filepath.Join(viper.GetString("spider.path"), spider.Name)
+ if utils.Exists(spiderDir) {
+ if err := os.RemoveAll(spiderDir); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ }
+ if err := os.MkdirAll(spiderDir, 0777); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ spider.Src = spiderDir
+
+ // add the spider to the database
+ if err := spider.Add(); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ c.JSON(http.StatusOK, Response{
+ Status: "ok",
+ Message: "success",
+ Data: spider,
+ })
+}
+
+// update a configurable spider
+func PostConfigSpider(c *gin.Context) {
+ PostSpider(c)
+}
+
+// upload a configurable spider's Spiderfile
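+// Overall flow: receive the Spiderfile, parse it as YAML, copy the Scrapy template,
+// generate the spider code from the config, zip the result and store it in GridFS.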
+func UploadConfigSpider(c *gin.Context) {
+ id := c.Param("id")
+
+ // get the spider
+ var spider model.Spider
+ spider, err := model.GetSpider(bson.ObjectIdHex(id))
+ if err != nil {
+ HandleErrorF(http.StatusBadRequest, c, fmt.Sprintf("cannot find spider (id: %s)", id))
+ return
+ }
+
+ // get the uploaded file
+ file, header, err := c.Request.FormFile("file")
+ if err != nil {
+ HandleError(http.StatusBadRequest, c, err)
+ return
+ }
+
+ // the file name must be "Spiderfile"
+ filename := header.Filename
+ if filename != "Spiderfile" {
+ HandleErrorF(http.StatusBadRequest, c, "filename must be 'Spiderfile'")
+ return
+ }
+
+ // spider directory
+ spiderDir := filepath.Join(viper.GetString("spider.path"), spider.Name)
+
+ // path to the spider's Spiderfile
+ sfPath := filepath.Join(spiderDir, filename)
+
+ // create the Spiderfile if it does not exist, or open it if it does
+ var f *os.File
+ if utils.Exists(sfPath) {
+ f, err = os.OpenFile(sfPath, os.O_WRONLY|os.O_TRUNC, 0777)
+ if err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ } else {
+ f, err = os.Create(sfPath)
+ if err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ }
+
+ // copy the uploaded file into the spider's Spiderfile
+ _, err = io.Copy(f, file)
+ if err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ // close the Spiderfile
+ _ = f.Close()
+
+ // construct the config data
+ configData := entity.ConfigSpiderData{}
+
+ // read the YAML file
+ yamlFile, err := ioutil.ReadFile(sfPath)
+ if err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ // unmarshal the YAML
+ if err := yaml.Unmarshal(yamlFile, &configData); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ // remove existing spider files
+ for _, fInfo := range utils.ListDir(spiderDir) {
+ // keep the Spiderfile
+ if fInfo.Name() == filename {
+ continue
+ }
+
+ // remove other files
+ if err := os.RemoveAll(filepath.Join(spiderDir, fInfo.Name())); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ }
+
+ // copy the template spider files
+ tplDir := "./template/scrapy"
+ for _, fInfo := range utils.ListDir(tplDir) {
+ // skip the Spiderfile
+ if fInfo.Name() == "Spiderfile" {
+ continue
+ }
+
+ srcPath := filepath.Join(tplDir, fInfo.Name())
+ if fInfo.IsDir() {
+ dirPath := filepath.Join(spiderDir, fInfo.Name())
+ if err := utils.CopyDir(srcPath, dirPath); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ } else {
+ if err := utils.CopyFile(srcPath, filepath.Join(spiderDir, fInfo.Name())); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ }
+ }
+
+ // generate the spider files from the config
+ if err := services.GenerateConfigSpiderFiles(spider, configData); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ // compress into a zip file
+ files, err := utils.GetFilesFromDir(spiderDir)
+ if err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+ randomId := uuid.NewV4()
+ tmpFilePath := filepath.Join(viper.GetString("other.tmppath"), spider.Name+"."+randomId.String()+".zip")
+ spiderZipFileName := spider.Name + ".zip"
+ if err := utils.Compress(files, tmpFilePath); err != nil {
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ // get the GridFS instance
+ s, gf := database.GetGridFs("files")
+ defer s.Close()
+
+ // check whether the file already exists
+ var gfFile model.GridFs
+ if err := gf.Find(bson.M{"filename": spiderZipFileName}).One(&gfFile); err == nil {
+ // the file already exists, so remove it
+ _ = gf.RemoveId(gfFile.Id)
+ }
+
+ // upload to GridFS
+ fid, err := services.UploadToGridFs(spiderZipFileName, tmpFilePath)
+ if err != nil {
+ log.Errorf("upload to grid fs error: %s", err.Error())
+ debug.PrintStack()
+ HandleError(http.StatusInternalServerError, c, err)
+ return
+ }
+
+ // save the spider's FileId
+ spider.FileId = fid
+ _ = spider.Save()
+
+ c.JSON(http.StatusOK, Response{
+ Status: "ok",
+ Message: "success",
+ })
+}
diff --git a/backend/routes/spider.go b/backend/routes/spider.go
index 4c26fcee..d351f1bb 100644
--- a/backend/routes/spider.go
+++ b/backend/routes/spider.go
@@ -153,6 +153,7 @@ func PutSpider(c *gin.Context) {
return
}
+ // get the GridFS instance
s, gf := database.GetGridFs("files")
defer s.Close()
diff --git a/backend/routes/task.go b/backend/routes/task.go
index c84ea210..9c0aa43f 100644
--- a/backend/routes/task.go
+++ b/backend/routes/task.go
@@ -36,7 +36,7 @@ func GetTaskList(c *gin.Context) {
data.PageNum = 1
}
if data.PageSize == 0 {
- data.PageNum = 10
+ data.PageSize = 10
}
// filter conditions
diff --git a/backend/services/config_spider.go b/backend/services/config_spider.go
new file mode 100644
index 00000000..4e8005a1
--- /dev/null
+++ b/backend/services/config_spider.go
@@ -0,0 +1,118 @@
+package services
+
+import (
+ "crawlab/constants"
+ "crawlab/entity"
+ "crawlab/model"
+ "crawlab/model/config_spider"
+ "errors"
+ "fmt"
+ "strings"
+)
+
+func GenerateConfigSpiderFiles(spider model.Spider, configData entity.ConfigSpiderData) error {
+ // validate the Spiderfile
+ if err := ValidateSpiderfile(configData); err != nil {
+ return err
+ }
+
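+ // NOTE: only the Scrapy code generator is implemented for now; ValidateSpiderfile
+ // rejects other engines (an empty engine defaults to Scrapy).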
+ // construct the code generator
+ generator := config_spider.ScrapyGenerator{
+ Spider: spider,
+ ConfigData: configData,
+ }
+
+ // generate the code
+ if err := generator.Generate(); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// ValidateSpiderfile validates the Spiderfile
+func ValidateSpiderfile(configData entity.ConfigSpiderData) error {
+ // get all fields
+ fields := config_spider.GetAllFields(configData)
+
+ // validate that start_url is set
+ if configData.StartUrl == "" {
+ return errors.New("spiderfile start_url is empty")
+ }
+
+ // validate that stages exist
+ if len(configData.Stages) == 0 {
+ return errors.New("spiderfile stages is empty")
+ }
+
+ // validate stages
+ dict := map[string]int{}
+ for stageName, stage := range configData.Stages {
+ // the stage name must not be empty
+ if stageName == "" {
+ return errors.New("spiderfile stage name is empty")
+ }
+
+ // the stage name must not be a reserved name
+ // NOTE: other engines can be added here; the default is Scrapy
+ if configData.Engine == "" || configData.Engine == constants.EngineScrapy {
+ if strings.Contains(constants.ScrapyProtectedStageNames, stageName) {
+ return errors.New(fmt.Sprintf("spiderfile stage name '%s' is protected", stageName))
+ }
+ } else if configData.Engine == constants.EngineColly {
+ return errors.New(fmt.Sprintf("engine '%s' is not implemented", configData.Engine))
+ }
+
+ // stage names must be unique
+ if dict[stageName] == 1 {
+ return errors.New("spiderfile stage name should be unique")
+ }
+ dict[stageName] = 1
+
+ // the stage must have at least one field
+ if len(stage.Fields) == 0 {
+ return errors.New(fmt.Sprintf("spiderfile stage '%s' has no fields", stageName))
+ }
+
+ // a stage may have at most one next stage
+ hasNextStage := false
+ for _, field := range stage.Fields {
+ if field.NextStage != "" {
+ if hasNextStage {
+ return errors.New("spiderfile stage fields should have only 1 next_stage")
+ }
+ hasNextStage = true
+ }
+ }
+
+ // if the stage's is_list is true, list_css must be set
+ if stage.IsList && stage.ListCss == "" {
+ return errors.New("spiderfile stage with is_list = true should have list_css being set")
+ }
+ }
+
+ // validate field name uniqueness
+ if !IsUniqueConfigSpiderFields(fields) {
+ return errors.New("spiderfile fields not unique")
+ }
+
+ // field names must not be reserved names
+ for _, field := range fields {
+ if strings.Contains(constants.ScrapyProtectedFieldNames, field.Name) {
+ return errors.New(fmt.Sprintf("spiderfile field name '%s' is protected", field.Name))
+ }
+ }
+
+ return nil
+}
+
+func IsUniqueConfigSpiderFields(fields []entity.Field) bool {
+ dict := map[string]int{}
+ for _, field := range fields {
+ if dict[field.Name] == 1 {
+ return false
+ }
+ dict[field.Name] = 1
+ }
+ return true
+}
diff --git a/backend/services/node.go b/backend/services/node.go
index dffe5ac9..be916f10 100644
--- a/backend/services/node.go
+++ b/backend/services/node.go
@@ -258,7 +258,7 @@ func InitNodeService() error {
return err
}
- // if this is the master node, refresh all node info every 30 seconds
+ // if this is the master node, refresh all node info every 10 seconds
if model.IsMaster() {
spec := "*/10 * * * * *"
if _, err := c.AddFunc(spec, UpdateNodeStatus); err != nil {
diff --git a/backend/services/register/register.go b/backend/services/register/register.go
index ccd8b67d..ed4e1891 100644
--- a/backend/services/register/register.go
+++ b/backend/services/register/register.go
@@ -6,6 +6,7 @@ import (
"net"
"reflect"
"runtime/debug"
+ "sync"
)
type Register interface {
@@ -97,25 +98,31 @@ func getMac() (string, error) {
var register Register
// get the register
-func GetRegister() Register {
- if register != nil {
- return register
- }
+var once sync.Once
- registerType := viper.GetString("server.register.type")
- if registerType == "mac" {
- register = &MacRegister{}
- } else {
- ip := viper.GetString("server.register.ip")
- if ip == "" {
- log.Error("server.register.ip is empty")
- debug.PrintStack()
- return nil
+func GetRegister() Register {
+ once.Do(func() {
- register = &IpRegister{
- Ip: ip,
+
+ registerType := viper.GetString("server.register.type")
+ if registerType == "mac" {
+ register = &MacRegister{}
+ } else {
+ ip := viper.GetString("server.register.ip")
+ if ip == "" {
+ log.Error("server.register.ip is empty")
+ debug.PrintStack()
+ register = nil
+ return
+ }
+ register = &IpRegister{
+ Ip: ip,
+ }
}
- }
- log.Info("register type is :" + reflect.TypeOf(register).String())
+ log.Info("register type is :" + reflect.TypeOf(register).String())
+
+ })
return register
}
diff --git a/backend/services/spider.go b/backend/services/spider.go
index 84d218bb..aa97b4ad 100644
--- a/backend/services/spider.go
+++ b/backend/services/spider.go
@@ -116,12 +116,20 @@ func PublishAllSpiders() {
// publish spider
func PublishSpider(spider model.Spider) {
- // look up the gf file; if it does not exist, delete the spider
+ // look up the gf file; if it does not exist, mark the spider file as missing
gfFile := model.GetGridFs(spider.FileId)
if gfFile == nil {
- _ = model.RemoveSpider(spider.Id)
+ spider.FileId = bson.ObjectIdHex(constants.ObjectIdNull)
+ _ = spider.Save()
return
}
+
+ // if FileId is the null ObjectId, the spider has not been uploaded to GridFS yet, so skip it
+ if spider.FileId == bson.ObjectIdHex(constants.ObjectIdNull) {
+ return
+ }
+
+ // get the spider sync instance
spiderSync := spider_handler.SpiderSync{
Spider: spider,
}
diff --git a/backend/services/task.go b/backend/services/task.go
index 67c3396b..d6c392ca 100644
--- a/backend/services/task.go
+++ b/backend/services/task.go
@@ -224,7 +224,16 @@ func ExecuteShellCmd(cmdStr string, cwd string, t model.Task, s model.Spider) (e
}
// environment variable configuration
- cmd = SetEnv(cmd, s.Envs, t.Id, s.Col)
+ envs := s.Envs
+ if s.Type == constants.Configurable {
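+ // these variables are consumed by the generated Scrapy pipeline (template/scrapy/config_spider/pipelines.py)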
+ envs = append(envs, model.Env{Name: "CRAWLAB_MONGO_HOST", Value: viper.GetString("mongo.host")})
+ envs = append(envs, model.Env{Name: "CRAWLAB_MONGO_PORT", Value: viper.GetString("mongo.port")})
+ envs = append(envs, model.Env{Name: "CRAWLAB_MONGO_DB", Value: viper.GetString("mongo.db")})
+ envs = append(envs, model.Env{Name: "CRAWLAB_MONGO_USERNAME", Value: viper.GetString("mongo.username")})
+ envs = append(envs, model.Env{Name: "CRAWLAB_MONGO_PASSWORD", Value: viper.GetString("mongo.password")})
+ envs = append(envs, model.Env{Name: "CRAWLAB_MONGO_AUTHSOURCE", Value: viper.GetString("mongo.authSource")})
+ }
+ cmd = SetEnv(cmd, envs, t.Id, s.Col)
// start a goroutine to monitor the process
ch := utils.TaskExecChanMap.ChanBlocked(t.Id)
@@ -378,7 +387,14 @@ func ExecuteTask(id int) {
)
// command to execute
- cmd := spider.Cmd
+ var cmd string
+ if spider.Type == constants.Configurable {
+ // configurable spider command
+ cmd = "scrapy crawl config_spider"
+ } else {
+ // customized spider command
+ cmd = spider.Cmd
+ }
// append parameters
if t.Param != "" {
diff --git a/backend/template/Spiderfile b/backend/template/Spiderfile
new file mode 100644
index 00000000..8d0e05cf
--- /dev/null
+++ b/backend/template/Spiderfile
@@ -0,0 +1,25 @@
+version: "0.4.0"
+name: "toscrapy_books"
+start_url: "http://books.toscrape.com"
+start_stage: "list"
+engine: "scrapy"
+stages:
+ list:
+ is_list: true # default: false
+ list_css: "section article.product_pod"
+ page_css: "ul.pager li.next a"
+ page_attr: "href" # default: href
+ fields:
+ - name: "title"
+ css: "h3 > a"
+ - name: "url"
+ css: "h3 > a"
+ attr: "href"
+ next_stage: "detail"
+ - name: "price"
+ css: ".product_price > .price_color"
+ detail:
+ is_list: false
+ fields:
+ - name: "description"
+ css: "#product_description + p"
diff --git a/backend/template/scrapy/config_spider/__init__.py b/backend/template/scrapy/config_spider/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/template/scrapy/config_spider/items.py b/backend/template/scrapy/config_spider/items.py
new file mode 100644
index 00000000..16681a52
--- /dev/null
+++ b/backend/template/scrapy/config_spider/items.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+
+# Define here the models for your scraped items
+#
+# See documentation in:
+# https://docs.scrapy.org/en/latest/topics/items.html
+
+import scrapy
+
+
+class Item(scrapy.Item):
+###ITEMS###
diff --git a/backend/template/scrapy/config_spider/middlewares.py b/backend/template/scrapy/config_spider/middlewares.py
new file mode 100644
index 00000000..e864bd0b
--- /dev/null
+++ b/backend/template/scrapy/config_spider/middlewares.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+
+# Define here the models for your spider middleware
+#
+# See documentation in:
+# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
+
+from scrapy import signals
+
+
+class ConfigSpiderSpiderMiddleware(object):
+ # Not all methods need to be defined. If a method is not defined,
+ # scrapy acts as if the spider middleware does not modify the
+ # passed objects.
+
+ @classmethod
+ def from_crawler(cls, crawler):
+ # This method is used by Scrapy to create your spiders.
+ s = cls()
+ crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
+ return s
+
+ def process_spider_input(self, response, spider):
+ # Called for each response that goes through the spider
+ # middleware and into the spider.
+
+ # Should return None or raise an exception.
+ return None
+
+ def process_spider_output(self, response, result, spider):
+ # Called with the results returned from the Spider, after
+ # it has processed the response.
+
+ # Must return an iterable of Request, dict or Item objects.
+ for i in result:
+ yield i
+
+ def process_spider_exception(self, response, exception, spider):
+ # Called when a spider or process_spider_input() method
+ # (from other spider middleware) raises an exception.
+
+ # Should return either None or an iterable of Request, dict
+ # or Item objects.
+ pass
+
+ def process_start_requests(self, start_requests, spider):
+ # Called with the start requests of the spider, and works
+ # similarly to the process_spider_output() method, except
+ # that it doesn’t have a response associated.
+
+ # Must return only requests (not items).
+ for r in start_requests:
+ yield r
+
+ def spider_opened(self, spider):
+ spider.logger.info('Spider opened: %s' % spider.name)
+
+
+class ConfigSpiderDownloaderMiddleware(object):
+ # Not all methods need to be defined. If a method is not defined,
+ # scrapy acts as if the downloader middleware does not modify the
+ # passed objects.
+
+ @classmethod
+ def from_crawler(cls, crawler):
+ # This method is used by Scrapy to create your spiders.
+ s = cls()
+ crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
+ return s
+
+ def process_request(self, request, spider):
+ # Called for each request that goes through the downloader
+ # middleware.
+
+ # Must either:
+ # - return None: continue processing this request
+ # - or return a Response object
+ # - or return a Request object
+ # - or raise IgnoreRequest: process_exception() methods of
+ # installed downloader middleware will be called
+ return None
+
+ def process_response(self, request, response, spider):
+ # Called with the response returned from the downloader.
+
+ # Must either;
+ # - return a Response object
+ # - return a Request object
+ # - or raise IgnoreRequest
+ return response
+
+ def process_exception(self, request, exception, spider):
+ # Called when a download handler or a process_request()
+ # (from other downloader middleware) raises an exception.
+
+ # Must either:
+ # - return None: continue processing this exception
+ # - return a Response object: stops process_exception() chain
+ # - return a Request object: stops process_exception() chain
+ pass
+
+ def spider_opened(self, spider):
+ spider.logger.info('Spider opened: %s' % spider.name)
diff --git a/backend/template/scrapy/config_spider/pipelines.py b/backend/template/scrapy/config_spider/pipelines.py
new file mode 100644
index 00000000..69af4c85
--- /dev/null
+++ b/backend/template/scrapy/config_spider/pipelines.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+# Define your item pipelines here
+#
+# Don't forget to add your pipeline to the ITEM_PIPELINES setting
+# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
+
+import os
+from pymongo import MongoClient
+
+mongo = MongoClient(
+ host=os.environ.get('CRAWLAB_MONGO_HOST') or 'localhost',
+ port=int(os.environ.get('CRAWLAB_MONGO_PORT') or 27017),
+ username=os.environ.get('CRAWLAB_MONGO_USERNAME'),
+ password=os.environ.get('CRAWLAB_MONGO_PASSWORD'),
+ authSource=os.environ.get('CRAWLAB_MONGO_AUTHSOURCE') or 'admin'
+)
+db = mongo[os.environ.get('CRAWLAB_MONGO_DB') or 'test']
+col = db[os.environ.get('CRAWLAB_COLLECTION') or 'test']
+task_id = os.environ.get('CRAWLAB_TASK_ID')
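+# The CRAWLAB_MONGO_* and CRAWLAB_TASK_ID variables are injected by Crawlab
+# when a configurable spider task is executed.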
+
+class ConfigSpiderPipeline(object):
+ def process_item(self, item, spider):
+ item['task_id'] = task_id
+ if col is not None:
+ col.save(item)
+ return item
diff --git a/backend/template/scrapy/config_spider/settings.py b/backend/template/scrapy/config_spider/settings.py
new file mode 100644
index 00000000..a0112373
--- /dev/null
+++ b/backend/template/scrapy/config_spider/settings.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+# Scrapy settings for config_spider project
+#
+# For simplicity, this file contains only settings considered important or
+# commonly used. You can find more settings consulting the documentation:
+#
+# https://docs.scrapy.org/en/latest/topics/settings.html
+# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
+# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
+
+BOT_NAME = 'config_spider'
+
+SPIDER_MODULES = ['config_spider.spiders']
+NEWSPIDER_MODULE = 'config_spider.spiders'
+
+
+# Crawl responsibly by identifying yourself (and your website) on the user-agent
+#USER_AGENT = 'config_spider (+http://www.yourdomain.com)'
+
+# Obey robots.txt rules
+ROBOTSTXT_OBEY = True
+
+# Configure maximum concurrent requests performed by Scrapy (default: 16)
+#CONCURRENT_REQUESTS = 32
+
+# Configure a delay for requests for the same website (default: 0)
+# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
+# See also autothrottle settings and docs
+#DOWNLOAD_DELAY = 3
+# The download delay setting will honor only one of:
+#CONCURRENT_REQUESTS_PER_DOMAIN = 16
+#CONCURRENT_REQUESTS_PER_IP = 16
+
+# Disable cookies (enabled by default)
+#COOKIES_ENABLED = False
+
+# Disable Telnet Console (enabled by default)
+#TELNETCONSOLE_ENABLED = False
+
+# Override the default request headers:
+#DEFAULT_REQUEST_HEADERS = {
+# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+# 'Accept-Language': 'en',
+#}
+
+# Enable or disable spider middlewares
+# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
+#SPIDER_MIDDLEWARES = {
+# 'config_spider.middlewares.ConfigSpiderSpiderMiddleware': 543,
+#}
+
+# Enable or disable downloader middlewares
+# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
+#DOWNLOADER_MIDDLEWARES = {
+# 'config_spider.middlewares.ConfigSpiderDownloaderMiddleware': 543,
+#}
+
+# Enable or disable extensions
+# See https://docs.scrapy.org/en/latest/topics/extensions.html
+#EXTENSIONS = {
+# 'scrapy.extensions.telnet.TelnetConsole': None,
+#}
+
+# Configure item pipelines
+# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
+ITEM_PIPELINES = {
+ 'config_spider.pipelines.ConfigSpiderPipeline': 300,
+}
+
+# Enable and configure the AutoThrottle extension (disabled by default)
+# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
+#AUTOTHROTTLE_ENABLED = True
+# The initial download delay
+#AUTOTHROTTLE_START_DELAY = 5
+# The maximum download delay to be set in case of high latencies
+#AUTOTHROTTLE_MAX_DELAY = 60
+# The average number of requests Scrapy should be sending in parallel to
+# each remote server
+#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
+# Enable showing throttling stats for every response received:
+#AUTOTHROTTLE_DEBUG = False
+
+# Enable and configure HTTP caching (disabled by default)
+# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
+#HTTPCACHE_ENABLED = True
+#HTTPCACHE_EXPIRATION_SECS = 0
+#HTTPCACHE_DIR = 'httpcache'
+#HTTPCACHE_IGNORE_HTTP_CODES = []
+#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
diff --git a/backend/template/scrapy/config_spider/spiders/__init__.py b/backend/template/scrapy/config_spider/spiders/__init__.py
new file mode 100644
index 00000000..ebd689ac
--- /dev/null
+++ b/backend/template/scrapy/config_spider/spiders/__init__.py
@@ -0,0 +1,4 @@
+# This package will contain the spiders of your Scrapy project
+#
+# Please refer to the documentation for information on how to create and manage
+# your spiders.
diff --git a/backend/template/scrapy/config_spider/spiders/spider.py b/backend/template/scrapy/config_spider/spiders/spider.py
new file mode 100644
index 00000000..0e3c661d
--- /dev/null
+++ b/backend/template/scrapy/config_spider/spiders/spider.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+import scrapy
+import re
+from config_spider.items import Item
+from urllib.parse import urljoin
+
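+# get_real_url resolves relative URLs against the current response URL;
+# absolute (http/https) and protocol-relative (//) URLs are returned unchanged.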
+def get_real_url(response, url):
+ if re.search(r'^https?|^\/\/', url):
+ return url
+ return urljoin(response.url, url)
+
+class ConfigSpider(scrapy.Spider):
+ name = 'config_spider'
+
+ def start_requests(self):
+ yield scrapy.Request(url='###START_URL###', callback=self.###START_STAGE###)
+
+###PARSERS###
diff --git a/backend/template/scrapy/scrapy.cfg b/backend/template/scrapy/scrapy.cfg
new file mode 100644
index 00000000..a78d91e3
--- /dev/null
+++ b/backend/template/scrapy/scrapy.cfg
@@ -0,0 +1,11 @@
+# Automatically created by: scrapy startproject
+#
+# For more information about the [deploy] section see:
+# https://scrapyd.readthedocs.io/en/latest/deploy.html
+
+[settings]
+default = config_spider.settings
+
+[deploy]
+#url = http://localhost:6800/
+project = config_spider
diff --git a/backend/utils/file.go b/backend/utils/file.go
index babc0d69..2dacc9ed 100644
--- a/backend/utils/file.go
+++ b/backend/utils/file.go
@@ -3,11 +3,15 @@ package utils
import (
"archive/zip"
"bufio"
+ "fmt"
"github.com/apex/log"
"io"
+ "io/ioutil"
"os"
+ "path"
"path/filepath"
"runtime/debug"
+ "strings"
)
// remove file
@@ -71,6 +75,16 @@ func IsDir(path string) bool {
return s.IsDir()
}
+func ListDir(path string) []os.FileInfo {
+ list, err := ioutil.ReadDir(path)
+ if err != nil {
+ log.Errorf(err.Error())
+ debug.PrintStack()
+ return nil
+ }
+ return list
+}
+
// check whether the given path is a file
func IsFile(path string) bool {
return !IsDir(path)
@@ -185,8 +199,7 @@ func Compress(files []*os.File, dest string) error {
w := zip.NewWriter(d)
defer Close(w)
for _, file := range files {
- err := _Compress(file, "", w)
- if err != nil {
+ if err := _Compress(file, "", w); err != nil {
return err
}
}
@@ -239,3 +252,128 @@ func _Compress(file *os.File, prefix string, zw *zip.Writer) error {
}
return nil
}
+
+func GetFilesFromDir(dirPath string) ([]*os.File, error) {
+ var res []*os.File
+ for _, fInfo := range ListDir(dirPath) {
+ f, err := os.Open(filepath.Join(dirPath, fInfo.Name()))
+ if err != nil {
+ return res, err
+ }
+ res = append(res, f)
+ }
+ return res, nil
+}
+
+func GetAllFilesFromDir(dirPath string) ([]*os.File, error) {
+ var res []*os.File
+ if err := filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error {
+ if !IsDir(path) {
+ f, err2 := os.Open(path)
+ if err2 != nil {
+ return err2
+ }
+ res = append(res, f)
+ }
+ return nil
+ }); err != nil {
+ log.Error(err.Error())
+ debug.PrintStack()
+ return res, err
+ }
+ return res, nil
+}
+
+// CopyFile copies a single file from src to dst
+func CopyFile(src, dst string) error {
+ var err error
+ var srcFd *os.File
+ var dstFd *os.File
+ var srcInfo os.FileInfo
+
+ if srcFd, err = os.Open(src); err != nil {
+ return err
+ }
+ defer srcFd.Close()
+
+ if dstFd, err = os.Create(dst); err != nil {
+ return err
+ }
+ defer dstFd.Close()
+
+ if _, err = io.Copy(dstFd, srcFd); err != nil {
+ return err
+ }
+ if srcInfo, err = os.Stat(src); err != nil {
+ return err
+ }
+ return os.Chmod(dst, srcInfo.Mode())
+}
+
+// CopyDir copies a whole directory recursively
+func CopyDir(src string, dst string) error {
+ var err error
+ var fds []os.FileInfo
+ var srcInfo os.FileInfo
+
+ if srcInfo, err = os.Stat(src); err != nil {
+ return err
+ }
+
+ if err = os.MkdirAll(dst, srcInfo.Mode()); err != nil {
+ return err
+ }
+
+ if fds, err = ioutil.ReadDir(src); err != nil {
+ return err
+ }
+ for _, fd := range fds {
+ srcfp := path.Join(src, fd.Name())
+ dstfp := path.Join(dst, fd.Name())
+
+ if fd.IsDir() {
+ if err = CopyDir(srcfp, dstfp); err != nil {
+ fmt.Println(err)
+ }
+ } else {
+ if err = CopyFile(srcfp, dstfp); err != nil {
+ fmt.Println(err)
+ }
+ }
+ }
+ return nil
+}
+
+// SetFileVariable sets a variable value in a file,
+// i.e. it replaces the file's ###KEY### placeholder with the given value
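+// Hypothetical example: SetFileVariable("spider.py", "START_URL", "http://example.com")
+// turns every occurrence of ###START_URL### in spider.py into that URL.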
+func SetFileVariable(filePath string, key string, value string) error {
+ // placeholder delimiter
+ sep := "###"
+
+ // read the file into bytes
+ contentBytes, err := ioutil.ReadFile(filePath)
+ if err != nil {
+ return err
+ }
+
+ // convert the bytes to a string
+ content := string(contentBytes)
+
+ // replace the placeholder text
+ content = strings.Replace(content, fmt.Sprintf("%s%s%s", sep, key, sep), value, -1)
+
+ // open the file
+ f, err := os.OpenFile(filePath, os.O_WRONLY|os.O_TRUNC, 0777)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // write the replaced content back to the file
+ if _, err := f.Write([]byte(content)); err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/backend/vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go b/backend/vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go
deleted file mode 100644
index 3525a004..00000000
--- a/backend/vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go
+++ /dev/null
@@ -1,294 +0,0 @@
-// +build ignore
-
-package main
-
-import (
- "bytes"
- "fmt"
- "go/format"
- "html/template"
- "io/ioutil"
- "log"
- "path/filepath"
- "strings"
-
- "github.com/globalsign/mgo/internal/json"
-)
-
-func main() {
- log.SetFlags(0)
- log.SetPrefix(name + ": ")
-
- var g Generator
-
- fmt.Fprintf(&g, "// Code generated by \"%s.go\"; DO NOT EDIT\n\n", name)
-
- src := g.generate()
-
- err := ioutil.WriteFile(fmt.Sprintf("%s.go", strings.TrimSuffix(name, "_generator")), src, 0644)
- if err != nil {
- log.Fatalf("writing output: %s", err)
- }
-}
-
-// Generator holds the state of the analysis. Primarily used to buffer
-// the output for format.Source.
-type Generator struct {
- bytes.Buffer // Accumulated output.
-}
-
-// format returns the gofmt-ed contents of the Generator's buffer.
-func (g *Generator) format() []byte {
- src, err := format.Source(g.Bytes())
- if err != nil {
- // Should never happen, but can arise when developing this code.
- // The user can compile the output to see the error.
- log.Printf("warning: internal error: invalid Go generated: %s", err)
- log.Printf("warning: compile the package to analyze the error")
- return g.Bytes()
- }
- return src
-}
-
-// EVERYTHING ABOVE IS CONSTANT BETWEEN THE GENERATORS
-
-const name = "bson_corpus_spec_test_generator"
-
-func (g *Generator) generate() []byte {
-
- testFiles, err := filepath.Glob("./specdata/specifications/source/bson-corpus/tests/*.json")
- if err != nil {
- log.Fatalf("error reading bson-corpus files: %s", err)
- }
-
- tests, err := g.loadTests(testFiles)
- if err != nil {
- log.Fatalf("error loading tests: %s", err)
- }
-
- tmpl, err := g.getTemplate()
- if err != nil {
- log.Fatalf("error loading template: %s", err)
- }
-
- tmpl.Execute(&g.Buffer, tests)
-
- return g.format()
-}
-
-func (g *Generator) loadTests(filenames []string) ([]*testDef, error) {
- var tests []*testDef
- for _, filename := range filenames {
- test, err := g.loadTest(filename)
- if err != nil {
- return nil, err
- }
-
- tests = append(tests, test)
- }
-
- return tests, nil
-}
-
-func (g *Generator) loadTest(filename string) (*testDef, error) {
- content, err := ioutil.ReadFile(filename)
- if err != nil {
- return nil, err
- }
-
- var testDef testDef
- err = json.Unmarshal(content, &testDef)
- if err != nil {
- return nil, err
- }
-
- names := make(map[string]struct{})
-
- for i := len(testDef.Valid) - 1; i >= 0; i-- {
- if testDef.BsonType == "0x05" && testDef.Valid[i].Description == "subtype 0x02" {
- testDef.Valid = append(testDef.Valid[:i], testDef.Valid[i+1:]...)
- continue
- }
-
- name := cleanupFuncName(testDef.Description + "_" + testDef.Valid[i].Description)
- nameIdx := name
- j := 1
- for {
- if _, ok := names[nameIdx]; !ok {
- break
- }
-
- nameIdx = fmt.Sprintf("%s_%d", name, j)
- }
-
- names[nameIdx] = struct{}{}
-
- testDef.Valid[i].TestDef = &testDef
- testDef.Valid[i].Name = nameIdx
- testDef.Valid[i].StructTest = testDef.TestKey != "" &&
- (testDef.BsonType != "0x05" || strings.Contains(testDef.Valid[i].Description, "0x00")) &&
- !testDef.Deprecated
- }
-
- for i := len(testDef.DecodeErrors) - 1; i >= 0; i-- {
- if strings.Contains(testDef.DecodeErrors[i].Description, "UTF-8") {
- testDef.DecodeErrors = append(testDef.DecodeErrors[:i], testDef.DecodeErrors[i+1:]...)
- continue
- }
-
- name := cleanupFuncName(testDef.Description + "_" + testDef.DecodeErrors[i].Description)
- nameIdx := name
- j := 1
- for {
- if _, ok := names[nameIdx]; !ok {
- break
- }
-
- nameIdx = fmt.Sprintf("%s_%d", name, j)
- }
- names[nameIdx] = struct{}{}
-
- testDef.DecodeErrors[i].Name = nameIdx
- }
-
- return &testDef, nil
-}
-
-func (g *Generator) getTemplate() (*template.Template, error) {
- content := `package bson_test
-
-import (
- "encoding/hex"
- "time"
-
- . "gopkg.in/check.v1"
- "github.com/globalsign/mgo/bson"
-)
-
-func testValid(c *C, in []byte, expected []byte, result interface{}) {
- err := bson.Unmarshal(in, result)
- c.Assert(err, IsNil)
-
- out, err := bson.Marshal(result)
- c.Assert(err, IsNil)
-
- c.Assert(string(expected), Equals, string(out), Commentf("roundtrip failed for %T, expected '%x' but got '%x'", result, expected, out))
-}
-
-func testDecodeSkip(c *C, in []byte) {
- err := bson.Unmarshal(in, &struct{}{})
- c.Assert(err, IsNil)
-}
-
-func testDecodeError(c *C, in []byte, result interface{}) {
- err := bson.Unmarshal(in, result)
- c.Assert(err, Not(IsNil))
-}
-
-{{range .}}
-{{range .Valid}}
-func (s *S) Test{{.Name}}(c *C) {
- b, err := hex.DecodeString("{{.Bson}}")
- c.Assert(err, IsNil)
-
- {{if .CanonicalBson}}
- cb, err := hex.DecodeString("{{.CanonicalBson}}")
- c.Assert(err, IsNil)
- {{else}}
- cb := b
- {{end}}
-
- var resultD bson.D
- testValid(c, b, cb, &resultD)
- {{if .StructTest}}var resultS struct {
- Element {{.TestDef.GoType}} ` + "`bson:\"{{.TestDef.TestKey}}\"`" + `
- }
- testValid(c, b, cb, &resultS){{end}}
-
- testDecodeSkip(c, b)
-}
-{{end}}
-
-{{range .DecodeErrors}}
-func (s *S) Test{{.Name}}(c *C) {
- b, err := hex.DecodeString("{{.Bson}}")
- c.Assert(err, IsNil)
-
- var resultD bson.D
- testDecodeError(c, b, &resultD)
-}
-{{end}}
-{{end}}
-`
- tmpl, err := template.New("").Parse(content)
- if err != nil {
- return nil, err
- }
- return tmpl, nil
-}
-
-func cleanupFuncName(name string) string {
- return strings.Map(func(r rune) rune {
- if (r >= 48 && r <= 57) || (r >= 65 && r <= 90) || (r >= 97 && r <= 122) {
- return r
- }
- return '_'
- }, name)
-}
-
-type testDef struct {
- Description string `json:"description"`
- BsonType string `json:"bson_type"`
- TestKey string `json:"test_key"`
- Valid []*valid `json:"valid"`
- DecodeErrors []*decodeError `json:"decodeErrors"`
- Deprecated bool `json:"deprecated"`
-}
-
-func (t *testDef) GoType() string {
- switch t.BsonType {
- case "0x01":
- return "float64"
- case "0x02":
- return "string"
- case "0x03":
- return "bson.D"
- case "0x04":
- return "[]interface{}"
- case "0x05":
- return "[]byte"
- case "0x07":
- return "bson.ObjectId"
- case "0x08":
- return "bool"
- case "0x09":
- return "time.Time"
- case "0x0E":
- return "string"
- case "0x10":
- return "int32"
- case "0x12":
- return "int64"
- case "0x13":
- return "bson.Decimal"
- default:
- return "interface{}"
- }
-}
-
-type valid struct {
- Description string `json:"description"`
- Bson string `json:"bson"`
- CanonicalBson string `json:"canonical_bson"`
-
- Name string
- StructTest bool
- TestDef *testDef
-}
-
-type decodeError struct {
- Description string `json:"description"`
- Bson string `json:"bson"`
-
- Name string
-}
diff --git a/backend/vendor/github.com/go-playground/locales/.gitignore b/backend/vendor/github.com/go-playground/locales/.gitignore
new file mode 100644
index 00000000..daf913b1
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/locales/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
diff --git a/backend/vendor/github.com/go-playground/locales/LICENSE b/backend/vendor/github.com/go-playground/locales/LICENSE
new file mode 100644
index 00000000..75854ac4
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/locales/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Go Playground
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/backend/vendor/github.com/go-playground/locales/README.md b/backend/vendor/github.com/go-playground/locales/README.md
new file mode 100644
index 00000000..43329f8d
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/locales/README.md
@@ -0,0 +1,172 @@
+## locales
+
+[](https://semaphoreci.com/joeybloggs/locales)
+[](https://goreportcard.com/report/github.com/go-playground/locales)
+[](https://godoc.org/github.com/go-playground/locales)
+
+[](https://gitter.im/go-playground/locales?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
+
+Locales is a set of locales generated from the [Unicode CLDR Project](http://cldr.unicode.org/) which can be used independently or within
+an i18n package; these were built for use with, but not exclusive to, [Universal Translator](https://github.com/go-playground/universal-translator).
+
+Features
+--------
+- [x] Rules generated from the latest [CLDR](http://cldr.unicode.org/index/downloads) data, v31.0.1
+- [x] Contains Cardinal, Ordinal and Range Plural Rules
+- [x] Contains Month, Weekday and Timezone translations built in
+- [x] Contains Date & Time formatting functions
+- [x] Contains Number, Currency, Accounting and Percent formatting functions
+- [x] Supports the "Gregorian" calendar only ( my time isn't unlimited, had to draw the line somewhere )
+
+Full Tests
+--------------------
+I could sure use your help adding tests for every locale, it is a huge undertaking and I just don't have the free time to do it all at the moment;
+any help would be **greatly appreciated!!!!** please see [issue](https://github.com/go-playground/locales/issues/1) for details.
+
+Installation
+-----------
+
+Use go get
+
+```shell
+go get github.com/go-playground/locales
+```
+
+NOTES
+--------
+You'll notice most return types are []byte, this is because most of the time the results will be concatenated with a larger body
+of text and can avoid some allocations if already appending to a byte array, otherwise just cast as string.
+
+Usage
+-------
+```go
+package main
+
+import (
+ "fmt"
+ "time"
+
+ "github.com/go-playground/locales/currency"
+ "github.com/go-playground/locales/en_CA"
+)
+
+func main() {
+
+ loc, _ := time.LoadLocation("America/Toronto")
+ datetime := time.Date(2016, 02, 03, 9, 0, 1, 0, loc)
+
+ l := en_CA.New()
+
+ // Dates
+ fmt.Println(l.FmtDateFull(datetime))
+ fmt.Println(l.FmtDateLong(datetime))
+ fmt.Println(l.FmtDateMedium(datetime))
+ fmt.Println(l.FmtDateShort(datetime))
+
+ // Times
+ fmt.Println(l.FmtTimeFull(datetime))
+ fmt.Println(l.FmtTimeLong(datetime))
+ fmt.Println(l.FmtTimeMedium(datetime))
+ fmt.Println(l.FmtTimeShort(datetime))
+
+ // Months Wide
+ fmt.Println(l.MonthWide(time.January))
+ fmt.Println(l.MonthWide(time.February))
+ fmt.Println(l.MonthWide(time.March))
+ // ...
+
+ // Months Abbreviated
+ fmt.Println(l.MonthAbbreviated(time.January))
+ fmt.Println(l.MonthAbbreviated(time.February))
+ fmt.Println(l.MonthAbbreviated(time.March))
+ // ...
+
+ // Months Narrow
+ fmt.Println(l.MonthNarrow(time.January))
+ fmt.Println(l.MonthNarrow(time.February))
+ fmt.Println(l.MonthNarrow(time.March))
+ // ...
+
+ // Weekdays Wide
+ fmt.Println(l.WeekdayWide(time.Sunday))
+ fmt.Println(l.WeekdayWide(time.Monday))
+ fmt.Println(l.WeekdayWide(time.Tuesday))
+ // ...
+
+ // Weekdays Abbreviated
+ fmt.Println(l.WeekdayAbbreviated(time.Sunday))
+ fmt.Println(l.WeekdayAbbreviated(time.Monday))
+ fmt.Println(l.WeekdayAbbreviated(time.Tuesday))
+ // ...
+
+ // Weekdays Short
+ fmt.Println(l.WeekdayShort(time.Sunday))
+ fmt.Println(l.WeekdayShort(time.Monday))
+ fmt.Println(l.WeekdayShort(time.Tuesday))
+ // ...
+
+ // Weekdays Narrow
+ fmt.Println(l.WeekdayNarrow(time.Sunday))
+ fmt.Println(l.WeekdayNarrow(time.Monday))
+ fmt.Println(l.WeekdayNarrow(time.Tuesday))
+ // ...
+
+ var f64 float64
+
+ f64 = -10356.4523
+
+ // Number
+ fmt.Println(l.FmtNumber(f64, 2))
+
+ // Currency
+ fmt.Println(l.FmtCurrency(f64, 2, currency.CAD))
+ fmt.Println(l.FmtCurrency(f64, 2, currency.USD))
+
+ // Accounting
+ fmt.Println(l.FmtAccounting(f64, 2, currency.CAD))
+ fmt.Println(l.FmtAccounting(f64, 2, currency.USD))
+
+ f64 = 78.12
+
+ // Percent
+ fmt.Println(l.FmtPercent(f64, 0))
+
+ // Plural Rules for locale, so you know what rules you must cover
+ fmt.Println(l.PluralsCardinal())
+ fmt.Println(l.PluralsOrdinal())
+
+ // Cardinal Plural Rules
+ fmt.Println(l.CardinalPluralRule(1, 0))
+ fmt.Println(l.CardinalPluralRule(1.0, 0))
+ fmt.Println(l.CardinalPluralRule(1.0, 1))
+ fmt.Println(l.CardinalPluralRule(3, 0))
+
+ // Ordinal Plural Rules
+ fmt.Println(l.OrdinalPluralRule(21, 0)) // 21st
+ fmt.Println(l.OrdinalPluralRule(22, 0)) // 22nd
+ fmt.Println(l.OrdinalPluralRule(33, 0)) // 33rd
+ fmt.Println(l.OrdinalPluralRule(34, 0)) // 34th
+
+ // Range Plural Rules
+ fmt.Println(l.RangePluralRule(1, 0, 1, 0)) // 1-1
+ fmt.Println(l.RangePluralRule(1, 0, 2, 0)) // 1-2
+ fmt.Println(l.RangePluralRule(5, 0, 8, 0)) // 5-8
+}
+```
+
+NOTES:
+-------
+These rules were generated from the [Unicode CLDR Project](http://cldr.unicode.org/), if you encounter any issues
+I strongly encourage contributing to the CLDR project to get the locale information corrected and the next time
+these locales are regenerated the fix will come with.
+
+I do however realize that time constraints are often important and so there are two options:
+
+1. Create your own locale, copy, paste and modify, and ensure it complies with the `Translator` interface.
+2. Add an exception in the locale generation code directly and once regenerated, fix will be in place.
+
+Please do not make fixes inside the locale files; they WILL get overwritten when the locales are regenerated.
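+
+For option 1, one low-effort approach (a sketch, not part of this package) is to embed an existing generated locale and override only the methods that need correcting; the embedded `Translator` satisfies the rest of the interface:
+
+```go
+package main
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/go-playground/locales"
+	"github.com/go-playground/locales/en_CA"
+)
+
+// fixedEnCA embeds the generated en_CA locale and overrides a single method.
+// "Sept" is a hypothetical correction, purely for illustration.
+type fixedEnCA struct {
+	locales.Translator
+}
+
+func (f fixedEnCA) MonthAbbreviated(month time.Month) string {
+	if month == time.September {
+		return "Sept"
+	}
+	return f.Translator.MonthAbbreviated(month)
+}
+
+func main() {
+	var l locales.Translator = fixedEnCA{Translator: en_CA.New()}
+	fmt.Println(l.MonthAbbreviated(time.September))
+}
+```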
+
+License
+------
+Distributed under MIT License, please see license file in code for more details.
diff --git a/backend/vendor/github.com/go-playground/locales/currency/currency.go b/backend/vendor/github.com/go-playground/locales/currency/currency.go
new file mode 100644
index 00000000..cdaba596
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/locales/currency/currency.go
@@ -0,0 +1,308 @@
+package currency
+
+// Type is the currency type associated with the locales currency enum
+type Type int
+
+// locale currencies
+const (
+ ADP Type = iota
+ AED
+ AFA
+ AFN
+ ALK
+ ALL
+ AMD
+ ANG
+ AOA
+ AOK
+ AON
+ AOR
+ ARA
+ ARL
+ ARM
+ ARP
+ ARS
+ ATS
+ AUD
+ AWG
+ AZM
+ AZN
+ BAD
+ BAM
+ BAN
+ BBD
+ BDT
+ BEC
+ BEF
+ BEL
+ BGL
+ BGM
+ BGN
+ BGO
+ BHD
+ BIF
+ BMD
+ BND
+ BOB
+ BOL
+ BOP
+ BOV
+ BRB
+ BRC
+ BRE
+ BRL
+ BRN
+ BRR
+ BRZ
+ BSD
+ BTN
+ BUK
+ BWP
+ BYB
+ BYN
+ BYR
+ BZD
+ CAD
+ CDF
+ CHE
+ CHF
+ CHW
+ CLE
+ CLF
+ CLP
+ CNH
+ CNX
+ CNY
+ COP
+ COU
+ CRC
+ CSD
+ CSK
+ CUC
+ CUP
+ CVE
+ CYP
+ CZK
+ DDM
+ DEM
+ DJF
+ DKK
+ DOP
+ DZD
+ ECS
+ ECV
+ EEK
+ EGP
+ ERN
+ ESA
+ ESB
+ ESP
+ ETB
+ EUR
+ FIM
+ FJD
+ FKP
+ FRF
+ GBP
+ GEK
+ GEL
+ GHC
+ GHS
+ GIP
+ GMD
+ GNF
+ GNS
+ GQE
+ GRD
+ GTQ
+ GWE
+ GWP
+ GYD
+ HKD
+ HNL
+ HRD
+ HRK
+ HTG
+ HUF
+ IDR
+ IEP
+ ILP
+ ILR
+ ILS
+ INR
+ IQD
+ IRR
+ ISJ
+ ISK
+ ITL
+ JMD
+ JOD
+ JPY
+ KES
+ KGS
+ KHR
+ KMF
+ KPW
+ KRH
+ KRO
+ KRW
+ KWD
+ KYD
+ KZT
+ LAK
+ LBP
+ LKR
+ LRD
+ LSL
+ LTL
+ LTT
+ LUC
+ LUF
+ LUL
+ LVL
+ LVR
+ LYD
+ MAD
+ MAF
+ MCF
+ MDC
+ MDL
+ MGA
+ MGF
+ MKD
+ MKN
+ MLF
+ MMK
+ MNT
+ MOP
+ MRO
+ MTL
+ MTP
+ MUR
+ MVP
+ MVR
+ MWK
+ MXN
+ MXP
+ MXV
+ MYR
+ MZE
+ MZM
+ MZN
+ NAD
+ NGN
+ NIC
+ NIO
+ NLG
+ NOK
+ NPR
+ NZD
+ OMR
+ PAB
+ PEI
+ PEN
+ PES
+ PGK
+ PHP
+ PKR
+ PLN
+ PLZ
+ PTE
+ PYG
+ QAR
+ RHD
+ ROL
+ RON
+ RSD
+ RUB
+ RUR
+ RWF
+ SAR
+ SBD
+ SCR
+ SDD
+ SDG
+ SDP
+ SEK
+ SGD
+ SHP
+ SIT
+ SKK
+ SLL
+ SOS
+ SRD
+ SRG
+ SSP
+ STD
+ STN
+ SUR
+ SVC
+ SYP
+ SZL
+ THB
+ TJR
+ TJS
+ TMM
+ TMT
+ TND
+ TOP
+ TPE
+ TRL
+ TRY
+ TTD
+ TWD
+ TZS
+ UAH
+ UAK
+ UGS
+ UGX
+ USD
+ USN
+ USS
+ UYI
+ UYP
+ UYU
+ UZS
+ VEB
+ VEF
+ VND
+ VNN
+ VUV
+ WST
+ XAF
+ XAG
+ XAU
+ XBA
+ XBB
+ XBC
+ XBD
+ XCD
+ XDR
+ XEU
+ XFO
+ XFU
+ XOF
+ XPD
+ XPF
+ XPT
+ XRE
+ XSU
+ XTS
+ XUA
+ XXX
+ YDD
+ YER
+ YUD
+ YUM
+ YUN
+ YUR
+ ZAL
+ ZAR
+ ZMK
+ ZMW
+ ZRN
+ ZRZ
+ ZWD
+ ZWL
+ ZWR
+)
diff --git a/backend/vendor/github.com/go-playground/locales/logo.png b/backend/vendor/github.com/go-playground/locales/logo.png
new file mode 100644
index 00000000..3038276e
Binary files /dev/null and b/backend/vendor/github.com/go-playground/locales/logo.png differ
diff --git a/backend/vendor/github.com/go-playground/locales/rules.go b/backend/vendor/github.com/go-playground/locales/rules.go
new file mode 100644
index 00000000..92029001
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/locales/rules.go
@@ -0,0 +1,293 @@
+package locales
+
+import (
+ "strconv"
+ "time"
+
+ "github.com/go-playground/locales/currency"
+)
+
+// // ErrBadNumberValue is returned when the number passed for
+// // plural rule determination cannot be parsed
+// type ErrBadNumberValue struct {
+// NumberValue string
+// InnerError error
+// }
+
+// // Error returns ErrBadNumberValue error string
+// func (e *ErrBadNumberValue) Error() string {
+// return fmt.Sprintf("Invalid Number Value '%s' %s", e.NumberValue, e.InnerError)
+// }
+
+// var _ error = new(ErrBadNumberValue)
+
+// PluralRule denotes the type of plural rules
+type PluralRule int
+
+// PluralRule's
+const (
+ PluralRuleUnknown PluralRule = iota
+ PluralRuleZero // zero
+ PluralRuleOne // one - singular
+ PluralRuleTwo // two - dual
+ PluralRuleFew // few - paucal
+ PluralRuleMany // many - also used for fractions if they have a separate class
+ PluralRuleOther // other - required—general plural form—also used if the language only has a single form
+)
+
+const (
+ pluralsString = "UnknownZeroOneTwoFewManyOther"
+)
+
+// Translator encapsulates an instance of a locale
+// NOTE: some values are returned as a []byte just in case the caller
+// wishes to add more and can help avoid allocations; otherwise just cast as string
+type Translator interface {
+
+ // The following Functions are for overriding, debugging or developing
+ // with a Translator Locale
+
+ // Locale returns the string value of the translator
+ Locale() string
+
+ // returns an array of cardinal plural rules associated
+ // with this translator
+ PluralsCardinal() []PluralRule
+
+ // returns an array of ordinal plural rules associated
+ // with this translator
+ PluralsOrdinal() []PluralRule
+
+ // returns an array of range plural rules associated
+ // with this translator
+ PluralsRange() []PluralRule
+
+ // returns the cardinal PluralRule given 'num' and digits/precision of 'v' for locale
+ CardinalPluralRule(num float64, v uint64) PluralRule
+
+ // returns the ordinal PluralRule given 'num' and digits/precision of 'v' for locale
+ OrdinalPluralRule(num float64, v uint64) PluralRule
+
+ // returns the ordinal PluralRule given 'num1', 'num2' and digits/precision of 'v1' and 'v2' for locale
+ RangePluralRule(num1 float64, v1 uint64, num2 float64, v2 uint64) PluralRule
+
+ // returns the locales abbreviated month given the 'month' provided
+ MonthAbbreviated(month time.Month) string
+
+ // returns the locales abbreviated months
+ MonthsAbbreviated() []string
+
+ // returns the locales narrow month given the 'month' provided
+ MonthNarrow(month time.Month) string
+
+ // returns the locales narrow months
+ MonthsNarrow() []string
+
+ // returns the locales wide month given the 'month' provided
+ MonthWide(month time.Month) string
+
+ // returns the locales wide months
+ MonthsWide() []string
+
+ // returns the locales abbreviated weekday given the 'weekday' provided
+ WeekdayAbbreviated(weekday time.Weekday) string
+
+ // returns the locales abbreviated weekdays
+ WeekdaysAbbreviated() []string
+
+ // returns the locales narrow weekday given the 'weekday' provided
+ WeekdayNarrow(weekday time.Weekday) string
+
+	// returns the locales narrow weekdays
+ WeekdaysNarrow() []string
+
+ // returns the locales short weekday given the 'weekday' provided
+ WeekdayShort(weekday time.Weekday) string
+
+ // returns the locales short weekdays
+ WeekdaysShort() []string
+
+ // returns the locales wide weekday given the 'weekday' provided
+ WeekdayWide(weekday time.Weekday) string
+
+ // returns the locales wide weekdays
+ WeekdaysWide() []string
+
+	// The following Functions are common Formatting functions for the Translator's Locale
+
+ // returns 'num' with digits/precision of 'v' for locale and handles both Whole and Real numbers based on 'v'
+ FmtNumber(num float64, v uint64) string
+
+ // returns 'num' with digits/precision of 'v' for locale and handles both Whole and Real numbers based on 'v'
+ // NOTE: 'num' passed into FmtPercent is assumed to be in percent already
+ FmtPercent(num float64, v uint64) string
+
+ // returns the currency representation of 'num' with digits/precision of 'v' for locale
+ FmtCurrency(num float64, v uint64, currency currency.Type) string
+
+ // returns the currency representation of 'num' with digits/precision of 'v' for locale
+ // in accounting notation.
+ FmtAccounting(num float64, v uint64, currency currency.Type) string
+
+ // returns the short date representation of 't' for locale
+ FmtDateShort(t time.Time) string
+
+ // returns the medium date representation of 't' for locale
+ FmtDateMedium(t time.Time) string
+
+ // returns the long date representation of 't' for locale
+ FmtDateLong(t time.Time) string
+
+ // returns the full date representation of 't' for locale
+ FmtDateFull(t time.Time) string
+
+ // returns the short time representation of 't' for locale
+ FmtTimeShort(t time.Time) string
+
+ // returns the medium time representation of 't' for locale
+ FmtTimeMedium(t time.Time) string
+
+ // returns the long time representation of 't' for locale
+ FmtTimeLong(t time.Time) string
+
+ // returns the full time representation of 't' for locale
+ FmtTimeFull(t time.Time) string
+}
+
+// String returns the string value of PluralRule
+func (p PluralRule) String() string {
+
+ switch p {
+ case PluralRuleZero:
+ return pluralsString[7:11]
+ case PluralRuleOne:
+ return pluralsString[11:14]
+ case PluralRuleTwo:
+ return pluralsString[14:17]
+ case PluralRuleFew:
+ return pluralsString[17:20]
+ case PluralRuleMany:
+ return pluralsString[20:24]
+ case PluralRuleOther:
+ return pluralsString[24:]
+ default:
+ return pluralsString[:7]
+ }
+}
+
+//
+// Precision Notes:
+//
+// must specify a precision >= 0, and here is why https://play.golang.org/p/LyL90U0Vyh
+//
+// v := float64(3.141)
+// i := float64(int64(v))
+//
+// fmt.Println(v - i)
+//
+// or
+//
+// s := strconv.FormatFloat(v-i, 'f', -1, 64)
+// fmt.Println(s)
+//
+// these will not print what you'd expect: 0.14100000000000001
+// and so this library requires a precision to be specified, or
+// inaccurate plural rules could be applied.
+//
+//
+//
+// n - absolute value of the source number (integer and decimals).
+// i - integer digits of n.
+// v - number of visible fraction digits in n, with trailing zeros.
+// w - number of visible fraction digits in n, without trailing zeros.
+// f - visible fractional digits in n, with trailing zeros.
+// t - visible fractional digits in n, without trailing zeros.
+//
+//
+// Func(num float64, v uint64) // v = digits/precision and prevents -1 as a special case as this can lead to very unexpected behaviour, see precision note's above.
+//
+// n := math.Abs(num)
+// i := int64(n)
+// v := v
+//
+//
+// w := strconv.FormatFloat(num-float64(i), 'f', int(v), 64) // then parse backwards on string until no more zero's....
+// f := strconv.FormatFloat(n, 'f', int(v), 64) // then turn everything after decimal into an int64
+// t := strconv.FormatFloat(n, 'f', int(v), 64) // then parse backwards on string until no more zero's....
+//
+//
+//
+// General Inclusion Rules
+// - v will always be available inherently
+// - all require n
+// - w requires i
+//
+
+// W returns the number of visible fraction digits in N, without trailing zeros.
+func W(n float64, v uint64) (w int64) {
+
+ s := strconv.FormatFloat(n-float64(int64(n)), 'f', int(v), 64)
+
+	// will either be '0' or '0.xxxx', so if 1 then w will be zero
+ // otherwise need to parse
+ if len(s) != 1 {
+
+ s = s[2:]
+ end := len(s) + 1
+
+ for i := end; i >= 0; i-- {
+ if s[i] != '0' {
+ end = i + 1
+ break
+ }
+ }
+
+ w = int64(len(s[:end]))
+ }
+
+ return
+}
+
+// F returns the visible fractional digits in N, with trailing zeros.
+func F(n float64, v uint64) (f int64) {
+
+ s := strconv.FormatFloat(n-float64(int64(n)), 'f', int(v), 64)
+
+	// will either be '0' or '0.xxxx', so if 1 then f will be zero
+ // otherwise need to parse
+ if len(s) != 1 {
+
+ // ignoring error, because it can't fail as we generated
+ // the string internally from a real number
+ f, _ = strconv.ParseInt(s[2:], 10, 64)
+ }
+
+ return
+}
+
+// T returns the visible fractional digits in N, without trailing zeros.
+func T(n float64, v uint64) (t int64) {
+
+ s := strconv.FormatFloat(n-float64(int64(n)), 'f', int(v), 64)
+
+	// will either be '0' or '0.xxxx', so if 1 then t will be zero
+ // otherwise need to parse
+ if len(s) != 1 {
+
+ s = s[2:]
+ end := len(s) + 1
+
+ for i := end; i >= 0; i-- {
+ if s[i] != '0' {
+ end = i + 1
+ break
+ }
+ }
+
+ // ignoring error, because it can't fail as we generated
+ // the string internally from a real number
+ t, _ = strconv.ParseInt(s[:end], 10, 64)
+ }
+
+ return
+}
diff --git a/backend/vendor/github.com/go-playground/universal-translator/.gitignore b/backend/vendor/github.com/go-playground/universal-translator/.gitignore
new file mode 100644
index 00000000..26617857
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
\ No newline at end of file
diff --git a/backend/vendor/github.com/go-playground/universal-translator/LICENSE b/backend/vendor/github.com/go-playground/universal-translator/LICENSE
new file mode 100644
index 00000000..8d8aba15
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Go Playground
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/backend/vendor/github.com/go-playground/universal-translator/README.md b/backend/vendor/github.com/go-playground/universal-translator/README.md
new file mode 100644
index 00000000..24aef158
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/README.md
@@ -0,0 +1,90 @@
+## universal-translator
+
+
+[Build Status](https://semaphoreci.com/joeybloggs/universal-translator)
+[Coverage Status](https://coveralls.io/github/go-playground/universal-translator)
+[Go Report Card](https://goreportcard.com/report/github.com/go-playground/universal-translator)
+[GoDoc](https://godoc.org/github.com/go-playground/universal-translator)
+
+[Gitter](https://gitter.im/go-playground/universal-translator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
+
+Universal Translator is an i18n Translator for Go/Golang using CLDR data + pluralization rules
+
+Why another i18n library?
+--------------------------
+Because none of the plural rule implementations out there seem to be correct, including the previous implementation of
+this package, I took it upon myself to create [locales](https://github.com/go-playground/locales) for everyone to use;
+this package is a thin wrapper around [locales](https://github.com/go-playground/locales) that stores and translates
+text for use in your applications.
+
+Features
+--------
+- [x] Rules generated from the [CLDR](http://cldr.unicode.org/index/downloads) data, v30.0.3
+- [x] Contains Cardinal, Ordinal and Range Plural Rules
+- [x] Contains Month, Weekday and Timezone translations built in
+- [x] Contains Date & Time formatting functions
+- [x] Contains Number, Currency, Accounting and Percent formatting functions
+- [x] Supports the "Gregorian" calendar only (my time isn't unlimited; I had to draw the line somewhere)
+- [x] Supports loading translations from files
+- [x] Supports exporting translations to file(s), mainly for getting them professionally translated
+- [ ] Code generation for translation files -> Go code, i.e. after they have been professionally translated
+- [ ] Tests for all languages, I need help with this, please see [here](https://github.com/go-playground/locales/issues/1)
+
+Installation
+-----------
+
+Use go get
+
+```shell
+go get github.com/go-playground/universal-translator
+```
+
+Usage & Documentation
+-------
+
+Please see https://godoc.org/github.com/go-playground/universal-translator for usage docs
+
+##### Examples:
+
+- [Basic](https://github.com/go-playground/universal-translator/tree/master/examples/basic)
+- [Full - no files](https://github.com/go-playground/universal-translator/tree/master/examples/full-no-files)
+- [Full - with files](https://github.com/go-playground/universal-translator/tree/master/examples/full-with-files)
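+
+In addition to the linked examples, here is a minimal usage sketch; it assumes the `en_CA` locale package from
+[locales](https://github.com/go-playground/locales) (shown earlier in this diff) is also vendored:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/go-playground/locales/en_CA"
+	ut "github.com/go-playground/universal-translator"
+)
+
+func main() {
+	// en_CA.New() returns a locales.Translator; it is used here both as the
+	// fallback and as the only supported locale.
+	enCA := en_CA.New()
+	uni := ut.New(enCA, enCA)
+
+	trans, _ := uni.GetTranslator("en_CA")
+
+	// register a plain substitution translation for the key "welcome"
+	if err := trans.Add("welcome", "Welcome {0} to the site.", false); err != nil {
+		fmt.Println(err)
+	}
+
+	msg, _ := trans.T("welcome", "Joe")
+	fmt.Println(msg) // Welcome Joe to the site.
+}
+```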
+
+File formatting
+--------------
+All translation types (plain substitution, Cardinal, Ordinal and Range) can be contained within the same file(s);
+they are only separated for easy viewing.
+
+##### Examples:
+
+- [Formats](https://github.com/go-playground/universal-translator/tree/master/examples/file-formats)
+
+##### Basic Makeup
+NOTE: not all fields are needed for all translation types, see [examples](https://github.com/go-playground/universal-translator/tree/master/examples/file-formats)
+```json
+{
+ "locale": "en",
+ "key": "days-left",
+ "trans": "You have {0} day left.",
+ "type": "Cardinal",
+ "rule": "One",
+ "override": false
+}
+```
+|Field|Description|
+|---|---|
+|locale|The locale the translation is for.|
+|key|The translation key that will be used to store and look up each translation; normally a string or integer.|
+|trans|The actual translation text.|
+|type|The type of translation: Cardinal, Ordinal, Range, or "" for a plain substitution (not required to be defined if plain is used).|
+|rule|The plural rule the translation is for, eg. One, Two, Few, Many or Other (not required to be defined if plain is used).|
+|override|Set this to 'true' if you wish to override a translation that has already been registered. 99% of the time there is no need to define it.|
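+
+Once files in this format exist on disk, they can be loaded with the `Import` function from this package; a minimal
+sketch (the `./translations` directory name is hypothetical):
+
+```go
+package main
+
+import (
+	"log"
+
+	"github.com/go-playground/locales/en_CA"
+	ut "github.com/go-playground/universal-translator"
+)
+
+func main() {
+	enCA := en_CA.New()
+	uni := ut.New(enCA, enCA)
+
+	// load every *.json translation file found under ./translations;
+	// each file's "locale" field must match a locale registered above (here: en_CA)
+	if err := uni.Import(ut.FormatJSON, "./translations"); err != nil {
+		log.Fatal(err)
+	}
+
+	// fail fast if any locale is missing a translation for a required plural rule
+	if err := uni.VerifyTranslations(); err != nil {
+		log.Fatal(err)
+	}
+}
+```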
+
+Help With Tests
+---------------
+To anyone interested in helping or contributing, I sure could use some help creating tests for each language.
+Please see issue [here](https://github.com/go-playground/locales/issues/1) for details.
+
+License
+------
+Distributed under MIT License, please see license file in code for more details.
diff --git a/backend/vendor/github.com/go-playground/universal-translator/errors.go b/backend/vendor/github.com/go-playground/universal-translator/errors.go
new file mode 100644
index 00000000..38b163b6
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/errors.go
@@ -0,0 +1,148 @@
+package ut
+
+import (
+ "errors"
+ "fmt"
+
+ "github.com/go-playground/locales"
+)
+
+var (
+ // ErrUnknowTranslation indicates the translation could not be found
+ ErrUnknowTranslation = errors.New("Unknown Translation")
+)
+
+var _ error = new(ErrConflictingTranslation)
+var _ error = new(ErrRangeTranslation)
+var _ error = new(ErrOrdinalTranslation)
+var _ error = new(ErrCardinalTranslation)
+var _ error = new(ErrMissingPluralTranslation)
+var _ error = new(ErrExistingTranslator)
+
+// ErrExistingTranslator is the error representing a conflicting translator
+type ErrExistingTranslator struct {
+ locale string
+}
+
+// Error returns ErrExistingTranslator's internal error text
+func (e *ErrExistingTranslator) Error() string {
+ return fmt.Sprintf("error: conflicting translator for locale '%s'", e.locale)
+}
+
+// ErrConflictingTranslation is the error representing a conflicting translation
+type ErrConflictingTranslation struct {
+ locale string
+ key interface{}
+ rule locales.PluralRule
+ text string
+}
+
+// Error returns ErrConflictingTranslation's internal error text
+func (e *ErrConflictingTranslation) Error() string {
+
+ if _, ok := e.key.(string); !ok {
+ return fmt.Sprintf("error: conflicting key '%#v' rule '%s' with text '%s' for locale '%s', value being ignored", e.key, e.rule, e.text, e.locale)
+ }
+
+ return fmt.Sprintf("error: conflicting key '%s' rule '%s' with text '%s' for locale '%s', value being ignored", e.key, e.rule, e.text, e.locale)
+}
+
+// ErrRangeTranslation is the error representing a range translation error
+type ErrRangeTranslation struct {
+ text string
+}
+
+// Error returns ErrRangeTranslation's internal error text
+func (e *ErrRangeTranslation) Error() string {
+ return e.text
+}
+
+// ErrOrdinalTranslation is the error representing an ordinal translation error
+type ErrOrdinalTranslation struct {
+ text string
+}
+
+// Error returns ErrOrdinalTranslation's internal error text
+func (e *ErrOrdinalTranslation) Error() string {
+ return e.text
+}
+
+// ErrCardinalTranslation is the error representing a cardinal translation error
+type ErrCardinalTranslation struct {
+ text string
+}
+
+// Error returns ErrCardinalTranslation's internal error text
+func (e *ErrCardinalTranslation) Error() string {
+ return e.text
+}
+
+// ErrMissingPluralTranslation is the error signifying a missing translation given
+// the locales plural rules.
+type ErrMissingPluralTranslation struct {
+ locale string
+ key interface{}
+ rule locales.PluralRule
+ translationType string
+}
+
+// Error returns ErrMissingPluralTranslation's internal error text
+func (e *ErrMissingPluralTranslation) Error() string {
+
+ if _, ok := e.key.(string); !ok {
+ return fmt.Sprintf("error: missing '%s' plural rule '%s' for translation with key '%#v' and locale '%s'", e.translationType, e.rule, e.key, e.locale)
+ }
+
+ return fmt.Sprintf("error: missing '%s' plural rule '%s' for translation with key '%s' and locale '%s'", e.translationType, e.rule, e.key, e.locale)
+}
+
+// ErrMissingBracket is the error representing a missing bracket in a translation
+// eg. This is a {0 <-- missing ending '}'
+type ErrMissingBracket struct {
+ locale string
+ key interface{}
+ text string
+}
+
+// Error returns ErrMissingBracket error message
+func (e *ErrMissingBracket) Error() string {
+ return fmt.Sprintf("error: missing bracket '{}', in translation. locale: '%s' key: '%v' text: '%s'", e.locale, e.key, e.text)
+}
+
+// ErrBadParamSyntax is the error representing a bad parameter definition in a translation
+// eg. This is a {must-be-int}
+type ErrBadParamSyntax struct {
+ locale string
+ param string
+ key interface{}
+ text string
+}
+
+// Error returns ErrBadParamSyntax error message
+func (e *ErrBadParamSyntax) Error() string {
+ return fmt.Sprintf("error: bad parameter syntax, missing parameter '%s' in translation. locale: '%s' key: '%v' text: '%s'", e.param, e.locale, e.key, e.text)
+}
+
+// import/export errors
+
+// ErrMissingLocale is the error representing an expected locale that could
+// not be found aka locale not registered with the UniversalTranslator Instance
+type ErrMissingLocale struct {
+ locale string
+}
+
+// Error returns ErrMissingLocale's internal error text
+func (e *ErrMissingLocale) Error() string {
+ return fmt.Sprintf("error: locale '%s' not registered.", e.locale)
+}
+
+// ErrBadPluralDefinition is the error representing an incorrect plural definition
+// usually found within translations defined within files during the import process.
+type ErrBadPluralDefinition struct {
+ tl translation
+}
+
+// Error returns ErrBadPluralDefinition's internal error text
+func (e *ErrBadPluralDefinition) Error() string {
+ return fmt.Sprintf("error: bad plural definition '%#v'", e.tl)
+}
diff --git a/backend/vendor/github.com/go-playground/universal-translator/import_export.go b/backend/vendor/github.com/go-playground/universal-translator/import_export.go
new file mode 100644
index 00000000..7bd76f26
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/import_export.go
@@ -0,0 +1,274 @@
+package ut
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+
+ "io"
+
+ "github.com/go-playground/locales"
+)
+
+type translation struct {
+ Locale string `json:"locale"`
+ Key interface{} `json:"key"` // either string or integer
+ Translation string `json:"trans"`
+ PluralType string `json:"type,omitempty"`
+ PluralRule string `json:"rule,omitempty"`
+ OverrideExisting bool `json:"override,omitempty"`
+}
+
+const (
+ cardinalType = "Cardinal"
+ ordinalType = "Ordinal"
+ rangeType = "Range"
+)
+
+// ImportExportFormat is the format of the file import or export
+type ImportExportFormat uint8
+
+// supported Export Formats
+const (
+ FormatJSON ImportExportFormat = iota
+)
+
+// Export writes the translations out to a file on disk.
+//
+// NOTE: this currently only works with string or int translations keys.
+func (t *UniversalTranslator) Export(format ImportExportFormat, dirname string) error {
+
+ _, err := os.Stat(dirname)
+ fmt.Println(dirname, err, os.IsNotExist(err))
+ if err != nil {
+
+ if !os.IsNotExist(err) {
+ return err
+ }
+
+ if err = os.MkdirAll(dirname, 0744); err != nil {
+ return err
+ }
+ }
+
+ // build up translations
+ var trans []translation
+ var b []byte
+ var ext string
+
+ for _, locale := range t.translators {
+
+ for k, v := range locale.(*translator).translations {
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k,
+ Translation: v.text,
+ })
+ }
+
+ for k, pluralTrans := range locale.(*translator).cardinalTanslations {
+
+ for i, plural := range pluralTrans {
+
+ // leave enough for all plural rules
+ // but not all are set for all languages.
+ if plural == nil {
+ continue
+ }
+
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k.(string),
+ Translation: plural.text,
+ PluralType: cardinalType,
+ PluralRule: locales.PluralRule(i).String(),
+ })
+ }
+ }
+
+ for k, pluralTrans := range locale.(*translator).ordinalTanslations {
+
+ for i, plural := range pluralTrans {
+
+ // leave enough for all plural rules
+ // but not all are set for all languages.
+ if plural == nil {
+ continue
+ }
+
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k.(string),
+ Translation: plural.text,
+ PluralType: ordinalType,
+ PluralRule: locales.PluralRule(i).String(),
+ })
+ }
+ }
+
+ for k, pluralTrans := range locale.(*translator).rangeTanslations {
+
+ for i, plural := range pluralTrans {
+
+ // leave enough for all plural rules
+ // but not all are set for all languages.
+ if plural == nil {
+ continue
+ }
+
+ trans = append(trans, translation{
+ Locale: locale.Locale(),
+ Key: k.(string),
+ Translation: plural.text,
+ PluralType: rangeType,
+ PluralRule: locales.PluralRule(i).String(),
+ })
+ }
+ }
+
+ switch format {
+ case FormatJSON:
+ b, err = json.MarshalIndent(trans, "", " ")
+ ext = ".json"
+ }
+
+ if err != nil {
+ return err
+ }
+
+ err = ioutil.WriteFile(filepath.Join(dirname, fmt.Sprintf("%s%s", locale.Locale(), ext)), b, 0644)
+ if err != nil {
+ return err
+ }
+
+ trans = trans[0:0]
+ }
+
+ return nil
+}
+
+// Import reads the translations out of a file or directory on disk.
+//
+// NOTE: this currently only works with string or int translations keys.
+func (t *UniversalTranslator) Import(format ImportExportFormat, dirnameOrFilename string) error {
+
+ fi, err := os.Stat(dirnameOrFilename)
+ if err != nil {
+ return err
+ }
+
+ processFn := func(filename string) error {
+
+ f, err := os.Open(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ return t.ImportByReader(format, f)
+ }
+
+ if !fi.IsDir() {
+ return processFn(dirnameOrFilename)
+ }
+
+ // recursively go through directory
+ walker := func(path string, info os.FileInfo, err error) error {
+
+ if info.IsDir() {
+ return nil
+ }
+
+ switch format {
+ case FormatJSON:
+ // skip non JSON files
+ if filepath.Ext(info.Name()) != ".json" {
+ return nil
+ }
+ }
+
+ return processFn(path)
+ }
+
+ return filepath.Walk(dirnameOrFilename, walker)
+}
+
+// ImportByReader imports the translations found within the contents read from the supplied reader.
+//
+// NOTE: generally used when assets have been embedded into the binary and are already in memory.
+func (t *UniversalTranslator) ImportByReader(format ImportExportFormat, reader io.Reader) error {
+
+ b, err := ioutil.ReadAll(reader)
+ if err != nil {
+ return err
+ }
+
+ var trans []translation
+
+ switch format {
+ case FormatJSON:
+ err = json.Unmarshal(b, &trans)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ for _, tl := range trans {
+
+ locale, found := t.FindTranslator(tl.Locale)
+ if !found {
+ return &ErrMissingLocale{locale: tl.Locale}
+ }
+
+ pr := stringToPR(tl.PluralRule)
+
+ if pr == locales.PluralRuleUnknown {
+
+ err = locale.Add(tl.Key, tl.Translation, tl.OverrideExisting)
+ if err != nil {
+ return err
+ }
+
+ continue
+ }
+
+ switch tl.PluralType {
+ case cardinalType:
+ err = locale.AddCardinal(tl.Key, tl.Translation, pr, tl.OverrideExisting)
+ case ordinalType:
+ err = locale.AddOrdinal(tl.Key, tl.Translation, pr, tl.OverrideExisting)
+ case rangeType:
+ err = locale.AddRange(tl.Key, tl.Translation, pr, tl.OverrideExisting)
+ default:
+ return &ErrBadPluralDefinition{tl: tl}
+ }
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func stringToPR(s string) locales.PluralRule {
+
+ switch s {
+ case "One":
+ return locales.PluralRuleOne
+ case "Two":
+ return locales.PluralRuleTwo
+ case "Few":
+ return locales.PluralRuleFew
+ case "Many":
+ return locales.PluralRuleMany
+ case "Other":
+ return locales.PluralRuleOther
+ default:
+ return locales.PluralRuleUnknown
+ }
+
+}
diff --git a/backend/vendor/github.com/go-playground/universal-translator/logo.png b/backend/vendor/github.com/go-playground/universal-translator/logo.png
new file mode 100644
index 00000000..a37aa8c0
Binary files /dev/null and b/backend/vendor/github.com/go-playground/universal-translator/logo.png differ
diff --git a/backend/vendor/github.com/go-playground/universal-translator/translator.go b/backend/vendor/github.com/go-playground/universal-translator/translator.go
new file mode 100644
index 00000000..cfafce8a
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/translator.go
@@ -0,0 +1,420 @@
+package ut
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/go-playground/locales"
+)
+
+const (
+ paramZero = "{0}"
+ paramOne = "{1}"
+ unknownTranslation = ""
+)
+
+// Translator is a universal-translator translator instance:
+// a thin wrapper around a locales.Translator instance
+// that provides some extra functionality
+type Translator interface {
+ locales.Translator
+
+ // adds a normal translation for a particular language/locale
+	// {#} is the only replacement type accepted; any number of them may be used
+ // eg. one: '{0} day left' other: '{0} days left'
+ Add(key interface{}, text string, override bool) error
+
+ // adds a cardinal plural translation for a particular language/locale
+ // {0} is the only replacement type accepted and only one variable is accepted as
+ // multiple cannot be used for a plural rule determination, unless it is a range;
+ // see AddRange below.
+ // eg. in locale 'en' one: '{0} day left' other: '{0} days left'
+ AddCardinal(key interface{}, text string, rule locales.PluralRule, override bool) error
+
+ // adds an ordinal plural translation for a particular language/locale
+ // {0} is the only replacement type accepted and only one variable is accepted as
+ // multiple cannot be used for a plural rule determination, unless it is a range;
+ // see AddRange below.
+ // eg. in locale 'en' one: '{0}st day of spring' other: '{0}nd day of spring'
+ // - 1st, 2nd, 3rd...
+ AddOrdinal(key interface{}, text string, rule locales.PluralRule, override bool) error
+
+ // adds a range plural translation for a particular language/locale
+ // {0} and {1} are the only replacement types accepted and only these are accepted.
+ // eg. in locale 'nl' one: '{0}-{1} day left' other: '{0}-{1} days left'
+ AddRange(key interface{}, text string, rule locales.PluralRule, override bool) error
+
+ // creates the translation for the locale given the 'key' and params passed in
+ T(key interface{}, params ...string) (string, error)
+
+ // creates the cardinal translation for the locale given the 'key', 'num' and 'digit' arguments
+ // and param passed in
+ C(key interface{}, num float64, digits uint64, param string) (string, error)
+
+ // creates the ordinal translation for the locale given the 'key', 'num' and 'digit' arguments
+ // and param passed in
+ O(key interface{}, num float64, digits uint64, param string) (string, error)
+
+ // creates the range translation for the locale given the 'key', 'num1', 'digit1', 'num2' and
+ // 'digit2' arguments and 'param1' and 'param2' passed in
+ R(key interface{}, num1 float64, digits1 uint64, num2 float64, digits2 uint64, param1, param2 string) (string, error)
+
+	// VerifyTranslations checks to ensure that no plural rules have been
+	// missed within the translations.
+ VerifyTranslations() error
+}
+
+var _ Translator = new(translator)
+var _ locales.Translator = new(translator)
+
+type translator struct {
+ locales.Translator
+ translations map[interface{}]*transText
+ cardinalTanslations map[interface{}][]*transText // array index is mapped to locales.PluralRule index + the locales.PluralRuleUnknown
+ ordinalTanslations map[interface{}][]*transText
+ rangeTanslations map[interface{}][]*transText
+}
+
+type transText struct {
+ text string
+ indexes []int
+}
+
+func newTranslator(trans locales.Translator) Translator {
+ return &translator{
+ Translator: trans,
+ translations: make(map[interface{}]*transText), // translation text broken up by byte index
+ cardinalTanslations: make(map[interface{}][]*transText),
+ ordinalTanslations: make(map[interface{}][]*transText),
+ rangeTanslations: make(map[interface{}][]*transText),
+ }
+}
+
+// Add adds a normal translation for a particular language/locale
+// {#} is the only replacement type accepted; any number of them may be used
+// eg. one: '{0} day left' other: '{0} days left'
+func (t *translator) Add(key interface{}, text string, override bool) error {
+
+ if _, ok := t.translations[key]; ok && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, text: text}
+ }
+
+ lb := strings.Count(text, "{")
+ rb := strings.Count(text, "}")
+
+ if lb != rb {
+ return &ErrMissingBracket{locale: t.Locale(), key: key, text: text}
+ }
+
+ trans := &transText{
+ text: text,
+ }
+
+ var idx int
+
+ for i := 0; i < lb; i++ {
+ s := "{" + strconv.Itoa(i) + "}"
+ idx = strings.Index(text, s)
+ if idx == -1 {
+ return &ErrBadParamSyntax{locale: t.Locale(), param: s, key: key, text: text}
+ }
+
+ trans.indexes = append(trans.indexes, idx)
+ trans.indexes = append(trans.indexes, idx+len(s))
+ }
+
+ t.translations[key] = trans
+
+ return nil
+}
+
+// AddCardinal adds a cardinal plural translation for a particular language/locale
+// {0} is the only replacement type accepted and only one variable is accepted as
+// multiple cannot be used for a plural rule determination, unless it is a range;
+// see AddRange below.
+// eg. in locale 'en' one: '{0} day left' other: '{0} days left'
+func (t *translator) AddCardinal(key interface{}, text string, rule locales.PluralRule, override bool) error {
+
+ var verified bool
+
+ // verify plural rule exists for locale
+ for _, pr := range t.PluralsCardinal() {
+ if pr == rule {
+ verified = true
+ break
+ }
+ }
+
+ if !verified {
+ return &ErrCardinalTranslation{text: fmt.Sprintf("error: cardinal plural rule '%s' does not exist for locale '%s' key: '%v' text: '%s'", rule, t.Locale(), key, text)}
+ }
+
+ tarr, ok := t.cardinalTanslations[key]
+ if ok {
+ // verify not adding a conflicting record
+ if len(tarr) > 0 && tarr[rule] != nil && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, rule: rule, text: text}
+ }
+
+ } else {
+ tarr = make([]*transText, 7, 7)
+ t.cardinalTanslations[key] = tarr
+ }
+
+ trans := &transText{
+ text: text,
+ indexes: make([]int, 2, 2),
+ }
+
+ tarr[rule] = trans
+
+ idx := strings.Index(text, paramZero)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrCardinalTranslation{text: fmt.Sprintf("error: parameter '%s' not found, may want to use 'Add' instead of 'AddCardinal'. locale: '%s' key: '%v' text: '%s'", paramZero, t.Locale(), key, text)}
+ }
+
+ trans.indexes[0] = idx
+ trans.indexes[1] = idx + len(paramZero)
+
+ return nil
+}
+
+// AddOrdinal adds an ordinal plural translation for a particular language/locale
+// {0} is the only replacement type accepted and only one variable is accepted as
+// multiple cannot be used for a plural rule determination, unless it is a range;
+// see AddRange below.
+// eg. in locale 'en' one: '{0}st day of spring' other: '{0}nd day of spring' - 1st, 2nd, 3rd...
+func (t *translator) AddOrdinal(key interface{}, text string, rule locales.PluralRule, override bool) error {
+
+ var verified bool
+
+ // verify plural rule exists for locale
+ for _, pr := range t.PluralsOrdinal() {
+ if pr == rule {
+ verified = true
+ break
+ }
+ }
+
+ if !verified {
+ return &ErrOrdinalTranslation{text: fmt.Sprintf("error: ordinal plural rule '%s' does not exist for locale '%s' key: '%v' text: '%s'", rule, t.Locale(), key, text)}
+ }
+
+ tarr, ok := t.ordinalTanslations[key]
+ if ok {
+ // verify not adding a conflicting record
+ if len(tarr) > 0 && tarr[rule] != nil && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, rule: rule, text: text}
+ }
+
+ } else {
+ tarr = make([]*transText, 7, 7)
+ t.ordinalTanslations[key] = tarr
+ }
+
+ trans := &transText{
+ text: text,
+ indexes: make([]int, 2, 2),
+ }
+
+ tarr[rule] = trans
+
+ idx := strings.Index(text, paramZero)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrOrdinalTranslation{text: fmt.Sprintf("error: parameter '%s' not found, may want to use 'Add' instead of 'AddOrdinal'. locale: '%s' key: '%v' text: '%s'", paramZero, t.Locale(), key, text)}
+ }
+
+ trans.indexes[0] = idx
+ trans.indexes[1] = idx + len(paramZero)
+
+ return nil
+}
+
+// AddRange adds a range plural translation for a particular language/locale
+// {0} and {1} are the only replacement types accepted and only these are accepted.
+// eg. in locale 'nl' one: '{0}-{1} day left' other: '{0}-{1} days left'
+func (t *translator) AddRange(key interface{}, text string, rule locales.PluralRule, override bool) error {
+
+ var verified bool
+
+ // verify plural rule exists for locale
+ for _, pr := range t.PluralsRange() {
+ if pr == rule {
+ verified = true
+ break
+ }
+ }
+
+ if !verified {
+ return &ErrRangeTranslation{text: fmt.Sprintf("error: range plural rule '%s' does not exist for locale '%s' key: '%v' text: '%s'", rule, t.Locale(), key, text)}
+ }
+
+ tarr, ok := t.rangeTanslations[key]
+ if ok {
+ // verify not adding a conflicting record
+ if len(tarr) > 0 && tarr[rule] != nil && !override {
+ return &ErrConflictingTranslation{locale: t.Locale(), key: key, rule: rule, text: text}
+ }
+
+ } else {
+ tarr = make([]*transText, 7, 7)
+ t.rangeTanslations[key] = tarr
+ }
+
+ trans := &transText{
+ text: text,
+ indexes: make([]int, 4, 4),
+ }
+
+ tarr[rule] = trans
+
+ idx := strings.Index(text, paramZero)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrRangeTranslation{text: fmt.Sprintf("error: parameter '%s' not found, are you sure you're adding a Range Translation? locale: '%s' key: '%v' text: '%s'", paramZero, t.Locale(), key, text)}
+ }
+
+ trans.indexes[0] = idx
+ trans.indexes[1] = idx + len(paramZero)
+
+ idx = strings.Index(text, paramOne)
+ if idx == -1 {
+ tarr[rule] = nil
+ return &ErrRangeTranslation{text: fmt.Sprintf("error: parameter '%s' not found, a Range Translation requires two parameters. locale: '%s' key: '%v' text: '%s'", paramOne, t.Locale(), key, text)}
+ }
+
+ trans.indexes[2] = idx
+ trans.indexes[3] = idx + len(paramOne)
+
+ return nil
+}
+
+// T creates the translation for the locale given the 'key' and params passed in
+func (t *translator) T(key interface{}, params ...string) (string, error) {
+
+ trans, ok := t.translations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ b := make([]byte, 0, 64)
+
+ var start, end, count int
+
+ for i := 0; i < len(trans.indexes); i++ {
+ end = trans.indexes[i]
+ b = append(b, trans.text[start:end]...)
+ b = append(b, params[count]...)
+ i++
+ start = trans.indexes[i]
+ count++
+ }
+
+ b = append(b, trans.text[start:]...)
+
+ return string(b), nil
+}
+
+// C creates the cardinal translation for the locale given the 'key', 'num' and 'digit' arguments and param passed in
+func (t *translator) C(key interface{}, num float64, digits uint64, param string) (string, error) {
+
+ tarr, ok := t.cardinalTanslations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ rule := t.CardinalPluralRule(num, digits)
+
+ trans := tarr[rule]
+
+ b := make([]byte, 0, 64)
+ b = append(b, trans.text[:trans.indexes[0]]...)
+ b = append(b, param...)
+ b = append(b, trans.text[trans.indexes[1]:]...)
+
+ return string(b), nil
+}
+
+// O creates the ordinal translation for the locale given the 'key', 'num' and 'digit' arguments and param passed in
+func (t *translator) O(key interface{}, num float64, digits uint64, param string) (string, error) {
+
+ tarr, ok := t.ordinalTanslations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ rule := t.OrdinalPluralRule(num, digits)
+
+ trans := tarr[rule]
+
+ b := make([]byte, 0, 64)
+ b = append(b, trans.text[:trans.indexes[0]]...)
+ b = append(b, param...)
+ b = append(b, trans.text[trans.indexes[1]:]...)
+
+ return string(b), nil
+}
+
+// R creates the range translation for the locale given the 'key', 'num1', 'digit1', 'num2' and 'digit2' arguments
+// and 'param1' and 'param2' passed in
+func (t *translator) R(key interface{}, num1 float64, digits1 uint64, num2 float64, digits2 uint64, param1, param2 string) (string, error) {
+
+ tarr, ok := t.rangeTanslations[key]
+ if !ok {
+ return unknownTranslation, ErrUnknowTranslation
+ }
+
+ rule := t.RangePluralRule(num1, digits1, num2, digits2)
+
+ trans := tarr[rule]
+
+ b := make([]byte, 0, 64)
+ b = append(b, trans.text[:trans.indexes[0]]...)
+ b = append(b, param1...)
+ b = append(b, trans.text[trans.indexes[1]:trans.indexes[2]]...)
+ b = append(b, param2...)
+ b = append(b, trans.text[trans.indexes[3]:]...)
+
+ return string(b), nil
+}
+
+// VerifyTranslations checks to ensure that no plural rules have been
+// missed within the translations.
+func (t *translator) VerifyTranslations() error {
+
+ for k, v := range t.cardinalTanslations {
+
+ for _, rule := range t.PluralsCardinal() {
+
+ if v[rule] == nil {
+ return &ErrMissingPluralTranslation{locale: t.Locale(), translationType: "plural", rule: rule, key: k}
+ }
+ }
+ }
+
+ for k, v := range t.ordinalTanslations {
+
+ for _, rule := range t.PluralsOrdinal() {
+
+ if v[rule] == nil {
+ return &ErrMissingPluralTranslation{locale: t.Locale(), translationType: "ordinal", rule: rule, key: k}
+ }
+ }
+ }
+
+ for k, v := range t.rangeTanslations {
+
+ for _, rule := range t.PluralsRange() {
+
+ if v[rule] == nil {
+ return &ErrMissingPluralTranslation{locale: t.Locale(), translationType: "range", rule: rule, key: k}
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/backend/vendor/github.com/go-playground/universal-translator/universal_translator.go b/backend/vendor/github.com/go-playground/universal-translator/universal_translator.go
new file mode 100644
index 00000000..dbf707f5
--- /dev/null
+++ b/backend/vendor/github.com/go-playground/universal-translator/universal_translator.go
@@ -0,0 +1,113 @@
+package ut
+
+import (
+ "strings"
+
+ "github.com/go-playground/locales"
+)
+
+// UniversalTranslator holds all locale & translation data
+type UniversalTranslator struct {
+ translators map[string]Translator
+ fallback Translator
+}
+
+// New returns a new UniversalTranslator instance set with
+// the fallback locale and locales it should support
+func New(fallback locales.Translator, supportedLocales ...locales.Translator) *UniversalTranslator {
+
+ t := &UniversalTranslator{
+ translators: make(map[string]Translator),
+ }
+
+ for _, v := range supportedLocales {
+
+ trans := newTranslator(v)
+ t.translators[strings.ToLower(trans.Locale())] = trans
+
+ if fallback.Locale() == v.Locale() {
+ t.fallback = trans
+ }
+ }
+
+ if t.fallback == nil && fallback != nil {
+ t.fallback = newTranslator(fallback)
+ }
+
+ return t
+}
+
+// FindTranslator tries to find a Translator based on an array of locales
+// and returns the first one it can find, otherwise returns the
+// fallback translator.
+func (t *UniversalTranslator) FindTranslator(locales ...string) (trans Translator, found bool) {
+
+ for _, locale := range locales {
+
+ if trans, found = t.translators[strings.ToLower(locale)]; found {
+ return
+ }
+ }
+
+ return t.fallback, false
+}
+
+// GetTranslator returns the specified translator for the given locale,
+// or fallback if not found
+func (t *UniversalTranslator) GetTranslator(locale string) (trans Translator, found bool) {
+
+ if trans, found = t.translators[strings.ToLower(locale)]; found {
+ return
+ }
+
+ return t.fallback, false
+}
+
+// GetFallback returns the fallback locale
+func (t *UniversalTranslator) GetFallback() Translator {
+ return t.fallback
+}
+
+// AddTranslator adds the supplied translator, if it already exists the override param
+// will be checked and if false an error will be returned, otherwise the translator will be
+// overridden; if the fallback matches the supplied translator it will be overridden as well
+// NOTE: this is normally only used when translator is embedded within a library
+func (t *UniversalTranslator) AddTranslator(translator locales.Translator, override bool) error {
+
+ lc := strings.ToLower(translator.Locale())
+ _, ok := t.translators[lc]
+ if ok && !override {
+ return &ErrExistingTranslator{locale: translator.Locale()}
+ }
+
+ trans := newTranslator(translator)
+
+ if t.fallback.Locale() == translator.Locale() {
+
+ // because it's optional to have a fallback, I don't impose that limitation
+ // don't know why you wouldn't but...
+ if !override {
+ return &ErrExistingTranslator{locale: translator.Locale()}
+ }
+
+ t.fallback = trans
+ }
+
+ t.translators[lc] = trans
+
+ return nil
+}
+
+// VerifyTranslations runs through all locales and identifies any issues
+// eg. missing plural rules for a locale
+func (t *UniversalTranslator) VerifyTranslations() (err error) {
+
+ for _, trans := range t.translators {
+ err = trans.VerifyTranslations()
+ if err != nil {
+ return
+ }
+ }
+
+ return
+}
diff --git a/backend/vendor/github.com/gopherjs/gopherjs/LICENSE b/backend/vendor/github.com/gopherjs/gopherjs/LICENSE
new file mode 100644
index 00000000..d496fef1
--- /dev/null
+++ b/backend/vendor/github.com/gopherjs/gopherjs/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2013 Richard Musiol. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/backend/vendor/github.com/gopherjs/gopherjs/js/js.go b/backend/vendor/github.com/gopherjs/gopherjs/js/js.go
new file mode 100644
index 00000000..3fbf1d88
--- /dev/null
+++ b/backend/vendor/github.com/gopherjs/gopherjs/js/js.go
@@ -0,0 +1,168 @@
+// Package js provides functions for interacting with native JavaScript APIs. Calls to these functions are treated specially by GopherJS and translated directly to their corresponding JavaScript syntax.
+//
+// Use MakeWrapper to expose methods to JavaScript. When passing values directly, the following type conversions are performed:
+//
+// | Go type | JavaScript type | Conversions back to interface{} |
+// | --------------------- | --------------------- | ------------------------------- |
+// | bool | Boolean | bool |
+// | integers and floats | Number | float64 |
+// | string | String | string |
+// | []int8 | Int8Array | []int8 |
+// | []int16 | Int16Array | []int16 |
+// | []int32, []int | Int32Array | []int |
+// | []uint8 | Uint8Array | []uint8 |
+// | []uint16 | Uint16Array | []uint16 |
+// | []uint32, []uint | Uint32Array | []uint |
+// | []float32 | Float32Array | []float32 |
+// | []float64 | Float64Array | []float64 |
+// | all other slices | Array | []interface{} |
+// | arrays | see slice type | see slice type |
+// | functions | Function | func(...interface{}) *js.Object |
+// | time.Time | Date | time.Time |
+// | - | instanceof Node | *js.Object |
+// | maps, structs | instanceof Object | map[string]interface{} |
+//
+// Additionally, for a struct containing a *js.Object field, only the content of the field will be passed to JavaScript and vice versa.
+package js
+
+// Object is a container for a native JavaScript object. Calls to its methods are treated specially by GopherJS and translated directly to their JavaScript syntax. A nil pointer to Object is equal to JavaScript's "null". Object can not be used as a map key.
+type Object struct{ object *Object }
+
+// Get returns the object's property with the given key.
+func (o *Object) Get(key string) *Object { return o.object.Get(key) }
+
+// Set assigns the value to the object's property with the given key.
+func (o *Object) Set(key string, value interface{}) { o.object.Set(key, value) }
+
+// Delete removes the object's property with the given key.
+func (o *Object) Delete(key string) { o.object.Delete(key) }
+
+// Length returns the object's "length" property, converted to int.
+func (o *Object) Length() int { return o.object.Length() }
+
+// Index returns the i'th element of an array.
+func (o *Object) Index(i int) *Object { return o.object.Index(i) }
+
+// SetIndex sets the i'th element of an array.
+func (o *Object) SetIndex(i int, value interface{}) { o.object.SetIndex(i, value) }
+
+// Call calls the object's method with the given name.
+func (o *Object) Call(name string, args ...interface{}) *Object { return o.object.Call(name, args...) }
+
+// Invoke calls the object itself. This will fail if it is not a function.
+func (o *Object) Invoke(args ...interface{}) *Object { return o.object.Invoke(args...) }
+
+// New creates a new instance of this type object. This will fail if it is not a function (constructor).
+func (o *Object) New(args ...interface{}) *Object { return o.object.New(args...) }
+
+// Bool returns the object converted to bool according to JavaScript type conversions.
+func (o *Object) Bool() bool { return o.object.Bool() }
+
+// String returns the object converted to string according to JavaScript type conversions.
+func (o *Object) String() string { return o.object.String() }
+
+// Int returns the object converted to int according to JavaScript type conversions (parseInt).
+func (o *Object) Int() int { return o.object.Int() }
+
+// Int64 returns the object converted to int64 according to JavaScript type conversions (parseInt).
+func (o *Object) Int64() int64 { return o.object.Int64() }
+
+// Uint64 returns the object converted to uint64 according to JavaScript type conversions (parseInt).
+func (o *Object) Uint64() uint64 { return o.object.Uint64() }
+
+// Float returns the object converted to float64 according to JavaScript type conversions (parseFloat).
+func (o *Object) Float() float64 { return o.object.Float() }
+
+// Interface returns the object converted to interface{}. See table in package comment for details.
+func (o *Object) Interface() interface{} { return o.object.Interface() }
+
+// Unsafe returns the object as an uintptr, which can be converted via unsafe.Pointer. Not intended for public use.
+func (o *Object) Unsafe() uintptr { return o.object.Unsafe() }
+
+// Error encapsulates JavaScript errors. Those are turned into a Go panic and may be recovered, giving an *Error that holds the JavaScript error object.
+type Error struct {
+ *Object
+}
+
+// Error returns the message of the encapsulated JavaScript error object.
+func (err *Error) Error() string {
+ return "JavaScript error: " + err.Get("message").String()
+}
+
+// Stack returns the stack property of the encapsulated JavaScript error object.
+func (err *Error) Stack() string {
+ return err.Get("stack").String()
+}
+
+// Global gives JavaScript's global object ("window" for browsers and "GLOBAL" for Node.js).
+var Global *Object
+
+// Module gives the value of the "module" variable set by Node.js. Hint: Set a module export with 'js.Module.Get("exports").Set("exportName", ...)'.
+var Module *Object
+
+// Undefined gives the JavaScript value "undefined".
+var Undefined *Object
+
+// Debugger gets compiled to JavaScript's "debugger;" statement.
+func Debugger() {}
+
+// InternalObject returns the internal JavaScript object that represents i. Not intended for public use.
+func InternalObject(i interface{}) *Object {
+ return nil
+}
+
+// MakeFunc wraps a function and gives access to the values of JavaScript's "this" and "arguments" keywords.
+func MakeFunc(fn func(this *Object, arguments []*Object) interface{}) *Object {
+ return Global.Call("$makeFunc", InternalObject(fn))
+}
+
+// Keys returns the keys of the given JavaScript object.
+func Keys(o *Object) []string {
+ if o == nil || o == Undefined {
+ return nil
+ }
+ a := Global.Get("Object").Call("keys", o)
+ s := make([]string, a.Length())
+ for i := 0; i < a.Length(); i++ {
+ s[i] = a.Index(i).String()
+ }
+ return s
+}
+
+// MakeWrapper creates a JavaScript object which has wrappers for the exported methods of i. Use explicit getter and setter methods to expose struct fields to JavaScript.
+func MakeWrapper(i interface{}) *Object {
+ v := InternalObject(i)
+ o := Global.Get("Object").New()
+ o.Set("__internal_object__", v)
+ methods := v.Get("constructor").Get("methods")
+ for i := 0; i < methods.Length(); i++ {
+ m := methods.Index(i)
+ if m.Get("pkg").String() != "" { // not exported
+ continue
+ }
+ o.Set(m.Get("name").String(), func(args ...*Object) *Object {
+ return Global.Call("$externalizeFunction", v.Get(m.Get("prop").String()), m.Get("typ"), true).Call("apply", v, args)
+ })
+ }
+ return o
+}
+
+// NewArrayBuffer creates a JavaScript ArrayBuffer from a byte slice.
+func NewArrayBuffer(b []byte) *Object {
+ slice := InternalObject(b)
+ offset := slice.Get("$offset").Int()
+ length := slice.Get("$length").Int()
+ return slice.Get("$array").Get("buffer").Call("slice", offset, offset+length)
+}
+
+// M is a simple map type. It is intended as a shorthand for JavaScript objects (before conversion).
+type M map[string]interface{}
+
+// S is a simple slice type. It is intended as a shorthand for JavaScript arrays (before conversion).
+type S []interface{}
+
+func init() {
+ // avoid dead code elimination
+ e := Error{}
+ _ = e
+}
diff --git a/backend/vendor/github.com/jtolds/gls/LICENSE b/backend/vendor/github.com/jtolds/gls/LICENSE
new file mode 100644
index 00000000..9b4a822d
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2013, Space Monkey, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/backend/vendor/github.com/jtolds/gls/README.md b/backend/vendor/github.com/jtolds/gls/README.md
new file mode 100644
index 00000000..4ebb692f
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/README.md
@@ -0,0 +1,89 @@
+gls
+===
+
+Goroutine local storage
+
+### IMPORTANT NOTE ###
+
+It is my duty to point you to https://blog.golang.org/context, which is how
+Google solves all of the problems you'd perhaps consider using this package
+for at scale.
+
+One downside to Google's approach is that *all* of your functions must have
+a new first argument, but after clearing that hurdle everything else is much
+better.
+
+If you aren't interested in this warning, read on.
+
+### Huhwaht? Why? ###
+
+Every so often, a thread shows up on the
+[golang-nuts](https://groups.google.com/d/forum/golang-nuts) mailing list asking for some
+form of goroutine-local-storage, or some kind of goroutine id, or some kind of
+context. There are a few valid use cases for goroutine-local-storage, one of
+the most prominent being log line context. One poster was interested in being
+able to log an HTTP request context id in every log line in the same goroutine
+as the incoming HTTP request, without having to change every library and
+function call he was interested in logging.
+
+This would be pretty useful. Provided that you could get some kind of
+goroutine-local-storage, you could call
+[log.SetOutput](http://golang.org/pkg/log/#SetOutput) with your own logging
+writer that checks goroutine-local-storage for some context information and
+adds that context to your log lines.
+
+But alas, Andrew Gerrand's typically diplomatic answer to the question of
+goroutine-local variables was:
+
+> We wouldn't even be having this discussion if thread local storage wasn't
+> useful. But every feature comes at a cost, and in my opinion the cost of
+> threadlocals far outweighs their benefits. They're just not a good fit for
+> Go.
+
+So, yeah, that makes sense. That's a pretty good reason for why the language
+won't support a specific and (relatively) unuseful feature that requires some
+runtime changes, just for the sake of a little bit of log improvement.
+
+But does Go require runtime changes?
+
+### How it works ###
+
+Go has pretty fantastic introspective and reflective features, but one thing Go
+doesn't give you is any kind of access to the stack pointer, or frame pointer,
+or goroutine id, or anything contextual about your current stack. It gives you
+access to your list of callers, but only along with program counters, which are
+fixed at compile time.
+
+But it does give you the stack.
+
+So, we define 16 special functions and embed base-16 tags into the stack using
+the call order of those 16 functions. Then, we can read our tags back out of
+the stack looking at the callers list.
+
+We then use these tags as an index into a traditional map for implementing
+this library.
+
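+For a concrete feel of the API (documented in full at the godoc link below),
+here is a minimal usage sketch; the key and the string values are made up for
+illustration:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/jtolds/gls"
+)
+
+// A ContextManager is usually declared at package scope; GenSym returns a
+// collision-free key for it.
+var (
+	mgr        = gls.NewContextManager()
+	requestKey = gls.GenSym()
+)
+
+func logLine(msg string) {
+	// GetValue sees anything set by SetValues higher up the call stack.
+	if id, ok := mgr.GetValue(requestKey); ok {
+		fmt.Printf("[%v] %s\n", id, msg)
+		return
+	}
+	fmt.Println(msg)
+}
+
+func main() {
+	done := make(chan struct{})
+	mgr.SetValues(gls.Values{requestKey: "req-42"}, func() {
+		logLine("handling request")
+		// Use gls.Go instead of the plain `go` keyword so the values survive
+		// the jump onto the new goroutine's stack.
+		gls.Go(func() {
+			logLine("background work")
+			close(done)
+		})
+	})
+	<-done
+}
+```
+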
+### What are people saying? ###
+
+"Wow, that's horrifying."
+
+"This is the most terrible thing I have seen in a very long time."
+
+"Where is it getting a context from? Is this serializing all the requests?
+What the heck is the client being bound to? What are these tags? Why does he
+need callers? Oh god no. No no no."
+
+### Docs ###
+
+Please see the docs at http://godoc.org/github.com/jtolds/gls
+
+### Related ###
+
+If you're okay relying on the string format of the current runtime stacktrace
+including a unique goroutine id (not guaranteed by the spec or anything, but
+very unlikely to change within a Go release), you might be able to squeeze
+out a bit more performance by using this similar library, inspired by some
+code Brad Fitzpatrick wrote for debugging his HTTP/2 library:
+https://github.com/tylerb/gls (in contrast, jtolds/gls doesn't rely on
+the string format of the runtime stacktrace at all, which is probably
+where its extra overhead comes from).
diff --git a/backend/vendor/github.com/jtolds/gls/context.go b/backend/vendor/github.com/jtolds/gls/context.go
new file mode 100644
index 00000000..618a1710
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/context.go
@@ -0,0 +1,153 @@
+// Package gls implements goroutine-local storage.
+package gls
+
+import (
+ "sync"
+)
+
+var (
+ mgrRegistry = make(map[*ContextManager]bool)
+ mgrRegistryMtx sync.RWMutex
+)
+
+// Values is simply a map of key types to value types. Used by SetValues to
+// set multiple values at once.
+type Values map[interface{}]interface{}
+
+// ContextManager is the main entrypoint for interacting with
+// Goroutine-local-storage. You can have multiple independent ContextManagers
+// at any given time. ContextManagers are usually declared globally for a given
+// class of context variables. You should use NewContextManager for
+// construction.
+type ContextManager struct {
+ mtx sync.Mutex
+ values map[uint]Values
+}
+
+// NewContextManager returns a brand new ContextManager. It also registers the
+// new ContextManager in the ContextManager registry which is used by the Go
+// method. ContextManagers are typically defined globally at package scope.
+func NewContextManager() *ContextManager {
+ mgr := &ContextManager{values: make(map[uint]Values)}
+ mgrRegistryMtx.Lock()
+ defer mgrRegistryMtx.Unlock()
+ mgrRegistry[mgr] = true
+ return mgr
+}
+
+// Unregister removes a ContextManager from the global registry, used by the
+// Go method. Only intended for use when you're completely done with a
+// ContextManager. Use of Unregister at all is rare.
+func (m *ContextManager) Unregister() {
+ mgrRegistryMtx.Lock()
+ defer mgrRegistryMtx.Unlock()
+ delete(mgrRegistry, m)
+}
+
+// SetValues takes a collection of values and a function to call for those
+// values to be set in. Anything further down the stack will have the set
+// values available through GetValue. SetValues will add new values or replace
+// existing values of the same key and will not mutate or change values for
+// previous stack frames.
+// SetValues is slow (makes a copy of all current and new values for the new
+// gls-context) in order to reduce the amount of lookups GetValue requires.
+func (m *ContextManager) SetValues(new_values Values, context_call func()) {
+ if len(new_values) == 0 {
+ context_call()
+ return
+ }
+
+ mutated_keys := make([]interface{}, 0, len(new_values))
+ mutated_vals := make(Values, len(new_values))
+
+ EnsureGoroutineId(func(gid uint) {
+ m.mtx.Lock()
+ state, found := m.values[gid]
+ if !found {
+ state = make(Values, len(new_values))
+ m.values[gid] = state
+ }
+ m.mtx.Unlock()
+
+ for key, new_val := range new_values {
+ mutated_keys = append(mutated_keys, key)
+ if old_val, ok := state[key]; ok {
+ mutated_vals[key] = old_val
+ }
+ state[key] = new_val
+ }
+
+ defer func() {
+ if !found {
+ m.mtx.Lock()
+ delete(m.values, gid)
+ m.mtx.Unlock()
+ return
+ }
+
+ for _, key := range mutated_keys {
+ if val, ok := mutated_vals[key]; ok {
+ state[key] = val
+ } else {
+ delete(state, key)
+ }
+ }
+ }()
+
+ context_call()
+ })
+}
+
+// GetValue will return a previously set value, provided that the value was set
+// by SetValues somewhere higher up the stack. If the value is not found, ok
+// will be false.
+func (m *ContextManager) GetValue(key interface{}) (
+ value interface{}, ok bool) {
+ gid, ok := GetGoroutineId()
+ if !ok {
+ return nil, false
+ }
+
+ m.mtx.Lock()
+ state, found := m.values[gid]
+ m.mtx.Unlock()
+
+ if !found {
+ return nil, false
+ }
+ value, ok = state[key]
+ return value, ok
+}
+
+func (m *ContextManager) getValues() Values {
+ gid, ok := GetGoroutineId()
+ if !ok {
+ return nil
+ }
+ m.mtx.Lock()
+ state, _ := m.values[gid]
+ m.mtx.Unlock()
+ return state
+}
+
+// Go preserves ContextManager values and Goroutine-local-storage across new
+// goroutine invocations. The Go method makes a copy of all existing values on
+// all registered context managers and makes sure they are still set after
+// kicking off the provided function in a new goroutine. If you don't use this
+// Go method instead of the standard 'go' keyword, you will lose values in
+// ContextManagers, as goroutines have brand new stacks.
+func Go(cb func()) {
+ mgrRegistryMtx.RLock()
+ defer mgrRegistryMtx.RUnlock()
+
+ for mgr := range mgrRegistry {
+ values := mgr.getValues()
+ if len(values) > 0 {
+ cb = func(mgr *ContextManager, cb func()) func() {
+ return func() { mgr.SetValues(values, cb) }
+ }(mgr, cb)
+ }
+ }
+
+ go cb()
+}
diff --git a/backend/vendor/github.com/jtolds/gls/gen_sym.go b/backend/vendor/github.com/jtolds/gls/gen_sym.go
new file mode 100644
index 00000000..7f615cce
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/gen_sym.go
@@ -0,0 +1,21 @@
+package gls
+
+import (
+ "sync"
+)
+
+var (
+ keyMtx sync.Mutex
+ keyCounter uint64
+)
+
+// ContextKey is a throwaway value you can use as a key to a ContextManager
+type ContextKey struct{ id uint64 }
+
+// GenSym will return a brand new, never-before-used ContextKey
+func GenSym() ContextKey {
+ keyMtx.Lock()
+ defer keyMtx.Unlock()
+ keyCounter += 1
+ return ContextKey{id: keyCounter}
+}
diff --git a/backend/vendor/github.com/jtolds/gls/gid.go b/backend/vendor/github.com/jtolds/gls/gid.go
new file mode 100644
index 00000000..c16bf3a5
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/gid.go
@@ -0,0 +1,25 @@
+package gls
+
+var (
+ stackTagPool = &idPool{}
+)
+
+// Will return this goroutine's identifier if set. If you always need a
+// goroutine identifier, you should use EnsureGoroutineId which will make one
+// if there isn't one already.
+func GetGoroutineId() (gid uint, ok bool) {
+ return readStackTag()
+}
+
+// Will call cb with the current goroutine identifier. If one hasn't already
+// been generated, one will be created and set first. The goroutine identifier
+// might be invalid after cb returns.
+func EnsureGoroutineId(cb func(gid uint)) {
+ if gid, ok := readStackTag(); ok {
+ cb(gid)
+ return
+ }
+ gid := stackTagPool.Acquire()
+ defer stackTagPool.Release(gid)
+ addStackTag(gid, func() { cb(gid) })
+}
diff --git a/backend/vendor/github.com/jtolds/gls/id_pool.go b/backend/vendor/github.com/jtolds/gls/id_pool.go
new file mode 100644
index 00000000..b7974ae0
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/id_pool.go
@@ -0,0 +1,34 @@
+package gls
+
+// though this could probably be better at keeping ids smaller, the goal of
+// this class is to keep a registry of the smallest unique integer ids
+// per-process possible
+
+import (
+ "sync"
+)
+
+type idPool struct {
+ mtx sync.Mutex
+ released []uint
+ max_id uint
+}
+
+func (p *idPool) Acquire() (id uint) {
+ p.mtx.Lock()
+ defer p.mtx.Unlock()
+ if len(p.released) > 0 {
+ id = p.released[len(p.released)-1]
+ p.released = p.released[:len(p.released)-1]
+ return id
+ }
+ id = p.max_id
+ p.max_id++
+ return id
+}
+
+func (p *idPool) Release(id uint) {
+ p.mtx.Lock()
+ defer p.mtx.Unlock()
+ p.released = append(p.released, id)
+}
diff --git a/backend/vendor/github.com/jtolds/gls/stack_tags.go b/backend/vendor/github.com/jtolds/gls/stack_tags.go
new file mode 100644
index 00000000..37bbd334
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/stack_tags.go
@@ -0,0 +1,147 @@
+package gls
+
+// so, basically, we're going to encode integer tags in base-16 on the stack
+
+const (
+ bitWidth = 4
+ stackBatchSize = 16
+)
+
+var (
+ pc_lookup = make(map[uintptr]int8, 17)
+ mark_lookup [16]func(uint, func())
+)
+
+func init() {
+ setEntries := func(f func(uint, func()), v int8) {
+ var ptr uintptr
+ f(0, func() {
+ ptr = findPtr()
+ })
+ pc_lookup[ptr] = v
+ if v >= 0 {
+ mark_lookup[v] = f
+ }
+ }
+ setEntries(github_com_jtolds_gls_markS, -0x1)
+ setEntries(github_com_jtolds_gls_mark0, 0x0)
+ setEntries(github_com_jtolds_gls_mark1, 0x1)
+ setEntries(github_com_jtolds_gls_mark2, 0x2)
+ setEntries(github_com_jtolds_gls_mark3, 0x3)
+ setEntries(github_com_jtolds_gls_mark4, 0x4)
+ setEntries(github_com_jtolds_gls_mark5, 0x5)
+ setEntries(github_com_jtolds_gls_mark6, 0x6)
+ setEntries(github_com_jtolds_gls_mark7, 0x7)
+ setEntries(github_com_jtolds_gls_mark8, 0x8)
+ setEntries(github_com_jtolds_gls_mark9, 0x9)
+ setEntries(github_com_jtolds_gls_markA, 0xa)
+ setEntries(github_com_jtolds_gls_markB, 0xb)
+ setEntries(github_com_jtolds_gls_markC, 0xc)
+ setEntries(github_com_jtolds_gls_markD, 0xd)
+ setEntries(github_com_jtolds_gls_markE, 0xe)
+ setEntries(github_com_jtolds_gls_markF, 0xf)
+}
+
+func addStackTag(tag uint, context_call func()) {
+ if context_call == nil {
+ return
+ }
+ github_com_jtolds_gls_markS(tag, context_call)
+}
+
+// these private methods are named this horrendous name so gopherjs support
+// is easier. it shouldn't add any runtime cost in non-js builds.
+
+//go:noinline
+func github_com_jtolds_gls_markS(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark0(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark1(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark2(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark3(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark4(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark5(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark6(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark7(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark8(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_mark9(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_markA(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_markB(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_markC(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_markD(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_markE(tag uint, cb func()) { _m(tag, cb) }
+
+//go:noinline
+func github_com_jtolds_gls_markF(tag uint, cb func()) { _m(tag, cb) }
+
+func _m(tag_remainder uint, cb func()) {
+ if tag_remainder == 0 {
+ cb()
+ } else {
+ mark_lookup[tag_remainder&0xf](tag_remainder>>bitWidth, cb)
+ }
+}
+
+func readStackTag() (tag uint, ok bool) {
+ var current_tag uint
+ offset := 0
+ for {
+ batch, next_offset := getStack(offset, stackBatchSize)
+ for _, pc := range batch {
+ val, ok := pc_lookup[pc]
+ if !ok {
+ continue
+ }
+ if val < 0 {
+ return current_tag, true
+ }
+ current_tag <<= bitWidth
+ current_tag += uint(val)
+ }
+ if next_offset == 0 {
+ break
+ }
+ offset = next_offset
+ }
+ return 0, false
+}
+
+func (m *ContextManager) preventInlining() {
+ // dunno if findPtr or getStack are likely to get inlined in a future release
+ // of go, but if they are inlined and their callers are inlined, that could
+ // hork some things. let's do our best to explain to the compiler that we
+ // really don't want those two functions inlined by saying they could change
+ // at any time. assumes preventInlining doesn't get compiled out.
+ // this whole thing is probably overkill.
+ findPtr = m.values[0][0].(func() uintptr)
+ getStack = m.values[0][1].(func(int, int) ([]uintptr, int))
+}
diff --git a/backend/vendor/github.com/jtolds/gls/stack_tags_js.go b/backend/vendor/github.com/jtolds/gls/stack_tags_js.go
new file mode 100644
index 00000000..c4e8b801
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/stack_tags_js.go
@@ -0,0 +1,75 @@
+// +build js
+
+package gls
+
+// This file is used for GopherJS builds, which don't have normal runtime
+// stack trace support
+
+import (
+ "strconv"
+ "strings"
+
+ "github.com/gopherjs/gopherjs/js"
+)
+
+const (
+ jsFuncNamePrefix = "github_com_jtolds_gls_mark"
+)
+
+func jsMarkStack() (f []uintptr) {
+ lines := strings.Split(
+ js.Global.Get("Error").New().Get("stack").String(), "\n")
+ f = make([]uintptr, 0, len(lines))
+ for i, line := range lines {
+ line = strings.TrimSpace(line)
+ if line == "" {
+ continue
+ }
+ if i == 0 {
+ if line != "Error" {
+ panic("didn't understand js stack trace")
+ }
+ continue
+ }
+ fields := strings.Fields(line)
+ if len(fields) < 2 || fields[0] != "at" {
+ panic("didn't understand js stack trace")
+ }
+
+ pos := strings.Index(fields[1], jsFuncNamePrefix)
+ if pos < 0 {
+ continue
+ }
+ pos += len(jsFuncNamePrefix)
+ if pos >= len(fields[1]) {
+ panic("didn't understand js stack trace")
+ }
+ char := string(fields[1][pos])
+ switch char {
+ case "S":
+ f = append(f, uintptr(0))
+ default:
+ val, err := strconv.ParseUint(char, 16, 8)
+ if err != nil {
+ panic("didn't understand js stack trace")
+ }
+ f = append(f, uintptr(val)+1)
+ }
+ }
+ return f
+}
+
+// variables to prevent inlining
+var (
+ findPtr = func() uintptr {
+ funcs := jsMarkStack()
+ if len(funcs) == 0 {
+ panic("failed to find function pointer")
+ }
+ return funcs[0]
+ }
+
+ getStack = func(offset, amount int) (stack []uintptr, next_offset int) {
+ return jsMarkStack(), 0
+ }
+)
diff --git a/backend/vendor/github.com/jtolds/gls/stack_tags_main.go b/backend/vendor/github.com/jtolds/gls/stack_tags_main.go
new file mode 100644
index 00000000..4da89e44
--- /dev/null
+++ b/backend/vendor/github.com/jtolds/gls/stack_tags_main.go
@@ -0,0 +1,30 @@
+// +build !js
+
+package gls
+
+// This file is used for standard Go builds, which have the expected runtime
+// support
+
+import (
+ "runtime"
+)
+
+var (
+ findPtr = func() uintptr {
+ var pc [1]uintptr
+ n := runtime.Callers(4, pc[:])
+ if n != 1 {
+ panic("failed to find function pointer")
+ }
+ return pc[0]
+ }
+
+ getStack = func(offset, amount int) (stack []uintptr, next_offset int) {
+ stack = make([]uintptr, amount)
+ stack = stack[:runtime.Callers(offset, stack)]
+ if len(stack) < amount {
+ return stack, 0
+ }
+ return stack, offset + len(stack)
+ }
+)
diff --git a/backend/vendor/github.com/leodido/go-urn/.gitignore b/backend/vendor/github.com/leodido/go-urn/.gitignore
new file mode 100644
index 00000000..a30b5ab0
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/.gitignore
@@ -0,0 +1,9 @@
+*.exe
+*.dll
+*.so
+*.dylib
+
+*.test
+
+*.out
+*.txt
\ No newline at end of file
diff --git a/backend/vendor/github.com/leodido/go-urn/.travis.yml b/backend/vendor/github.com/leodido/go-urn/.travis.yml
new file mode 100644
index 00000000..913b6418
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/.travis.yml
@@ -0,0 +1,15 @@
+language: go
+
+go:
+ - 1.9.x
+ - 1.10.x
+ - tip
+
+before_install:
+ - go get -t -v ./...
+
+script:
+ - go test -race -coverprofile=coverage.txt -covermode=atomic
+
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
\ No newline at end of file
diff --git a/backend/vendor/github.com/leodido/go-urn/README.md b/backend/vendor/github.com/leodido/go-urn/README.md
new file mode 100644
index 00000000..cc902ec0
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/README.md
@@ -0,0 +1,55 @@
+[Build Status](https://travis-ci.org/leodido/go-urn) [Code Coverage](https://codecov.io/gh/leodido/go-urn) [API Documentation](https://godoc.org/github.com/leodido/go-urn)
+
+**A parser for URNs**.
+
+> As seen on [RFC 2141](https://tools.ietf.org/html/rfc2141#ref-1).
+
+[API documentation](https://godoc.org/github.com/leodido/go-urn).
+
+## Installation
+
+```
+go get github.com/leodido/go-urn
+```
+
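+A minimal usage sketch (the URN string below is just an illustrative value):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	urn "github.com/leodido/go-urn"
+)
+
+func main() {
+	// Parse returns (nil, false) when the input is not a valid RFC 2141 URN.
+	u, ok := urn.Parse([]byte("URN:ISBN:0451450523"))
+	if !ok {
+		fmt.Println("invalid URN")
+		return
+	}
+	fmt.Println(u.ID, u.SS)             // ISBN 0451450523
+	fmt.Println(u.Normalize().String()) // urn:isbn:0451450523
+}
+```
+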
+## Performance
+
+This implementation is really fast.
+
+Usually below ½ microsecond on my machine[1](#mymachine).
+
+While parsing, it also performs:
+
+1. fine-grained and informative erroring
+2. specific-string normalization
+
+```
+ok/00/urn:a:b______________________________________/-4 20000000 265 ns/op 182 B/op 6 allocs/op
+ok/01/URN:foo:a123,456_____________________________/-4 30000000 296 ns/op 200 B/op 6 allocs/op
+ok/02/urn:foo:a123%2c456___________________________/-4 20000000 331 ns/op 208 B/op 6 allocs/op
+ok/03/urn:ietf:params:scim:schemas:core:2.0:User___/-4 20000000 430 ns/op 280 B/op 6 allocs/op
+ok/04/urn:ietf:params:scim:schemas:extension:enterp/-4 20000000 411 ns/op 312 B/op 6 allocs/op
+ok/05/urn:ietf:params:scim:schemas:extension:enterp/-4 20000000 472 ns/op 344 B/op 6 allocs/op
+ok/06/urn:burnout:nss______________________________/-4 30000000 257 ns/op 192 B/op 6 allocs/op
+ok/07/urn:abcdefghilmnopqrstuvzabcdefghilm:x_______/-4 20000000 375 ns/op 213 B/op 6 allocs/op
+ok/08/urn:urnurnurn:urn____________________________/-4 30000000 265 ns/op 197 B/op 6 allocs/op
+ok/09/urn:ciao:@!=%2c(xyz)+a,b.*@g=$_'_____________/-4 20000000 307 ns/op 248 B/op 6 allocs/op
+ok/10/URN:x:abc%1dz%2f%3az_________________________/-4 30000000 259 ns/op 212 B/op 6 allocs/op
+no/11/URN:-xxx:x___________________________________/-4 20000000 445 ns/op 320 B/op 6 allocs/op
+no/12/urn::colon:nss_______________________________/-4 20000000 461 ns/op 320 B/op 6 allocs/op
+no/13/urn:abcdefghilmnopqrstuvzabcdefghilmn:specifi/-4 10000000 660 ns/op 320 B/op 6 allocs/op
+no/14/URN:a!?:x____________________________________/-4 20000000 507 ns/op 320 B/op 6 allocs/op
+no/15/urn:urn:NSS__________________________________/-4 20000000 429 ns/op 288 B/op 6 allocs/op
+no/16/urn:white_space:NSS__________________________/-4 20000000 482 ns/op 320 B/op 6 allocs/op
+no/17/urn:concat:no_spaces_________________________/-4 20000000 539 ns/op 328 B/op 7 allocs/op
+no/18/urn:a:/______________________________________/-4 20000000 470 ns/op 320 B/op 7 allocs/op
+no/19/urn:UrN:NSS__________________________________/-4 20000000 399 ns/op 288 B/op 6 allocs/op
+```
+
+---
+
+* [1]: Intel Core i7-7600U CPU @ 2.80GHz
+
+---
+
+[](https://github.com/igrigorik/ga-beacon)
\ No newline at end of file
diff --git a/backend/vendor/github.com/leodido/go-urn/machine.go b/backend/vendor/github.com/leodido/go-urn/machine.go
new file mode 100644
index 00000000..d621ea6e
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/machine.go
@@ -0,0 +1,1670 @@
+package urn
+
+import (
+ "fmt"
+)
+
+var (
+ errPrefix = "expecting the prefix to be the \"urn\" string (whatever case) [col %d]"
+ errIdentifier = "expecting the identifier to be string (1..31 alnum chars, also containing dashes but not at its start) [col %d]"
+ errSpecificString = "expecting the specific string to be a string containing alnum, hex, or others ([()+,-.:=@;$_!*']) chars [col %d]"
+ errNoUrnWithinID = "expecting the identifier to not contain the \"urn\" reserved string [col %d]"
+ errHex = "expecting the specific string hex chars to be well-formed (%%alnum{2}) [col %d]"
+ errParse = "parsing error [col %d]"
+)
+
+
+const start int = 1
+const first_final int = 44
+
+const en_fail int = 46
+const en_main int = 1
+
+
+// Machine is the interface representing the FSM
+type Machine interface {
+ Error() error
+ Parse(input []byte) (*URN, error)
+}
+
+type machine struct {
+ data []byte
+ cs int
+ p, pe, eof, pb int
+ err error
+ tolower []int
+}
+
+// NewMachine creates a new FSM able to parse RFC 2141 strings.
+func NewMachine() Machine {
+ m := &machine{}
+
+ return m
+}
+
+// Err returns the error that occurred on the last call to Parse.
+//
+// If the result is nil, then the line was parsed successfully.
+func (m *machine) Error() error {
+ return m.err
+}
+
+func (m *machine) text() []byte {
+ return m.data[m.pb:m.p]
+}
+
+// Parse parses the input byte array as a RFC 2141 string.
+func (m *machine) Parse(input []byte) (*URN, error) {
+ m.data = input
+ m.p = 0
+ m.pb = 0
+ m.pe = len(input)
+ m.eof = len(input)
+ m.err = nil
+ m.tolower = []int{}
+ output := &URN{}
+
+ {
+ m.cs = start
+ }
+
+
+ {
+ if (m.p) == (m.pe) {
+ goto _test_eof
+ }
+ switch m.cs {
+ case 1:
+ goto st_case_1
+ case 0:
+ goto st_case_0
+ case 2:
+ goto st_case_2
+ case 3:
+ goto st_case_3
+ case 4:
+ goto st_case_4
+ case 5:
+ goto st_case_5
+ case 6:
+ goto st_case_6
+ case 7:
+ goto st_case_7
+ case 8:
+ goto st_case_8
+ case 9:
+ goto st_case_9
+ case 10:
+ goto st_case_10
+ case 11:
+ goto st_case_11
+ case 12:
+ goto st_case_12
+ case 13:
+ goto st_case_13
+ case 14:
+ goto st_case_14
+ case 15:
+ goto st_case_15
+ case 16:
+ goto st_case_16
+ case 17:
+ goto st_case_17
+ case 18:
+ goto st_case_18
+ case 19:
+ goto st_case_19
+ case 20:
+ goto st_case_20
+ case 21:
+ goto st_case_21
+ case 22:
+ goto st_case_22
+ case 23:
+ goto st_case_23
+ case 24:
+ goto st_case_24
+ case 25:
+ goto st_case_25
+ case 26:
+ goto st_case_26
+ case 27:
+ goto st_case_27
+ case 28:
+ goto st_case_28
+ case 29:
+ goto st_case_29
+ case 30:
+ goto st_case_30
+ case 31:
+ goto st_case_31
+ case 32:
+ goto st_case_32
+ case 33:
+ goto st_case_33
+ case 34:
+ goto st_case_34
+ case 35:
+ goto st_case_35
+ case 36:
+ goto st_case_36
+ case 37:
+ goto st_case_37
+ case 38:
+ goto st_case_38
+ case 44:
+ goto st_case_44
+ case 39:
+ goto st_case_39
+ case 40:
+ goto st_case_40
+ case 45:
+ goto st_case_45
+ case 41:
+ goto st_case_41
+ case 42:
+ goto st_case_42
+ case 43:
+ goto st_case_43
+ case 46:
+ goto st_case_46
+ }
+ goto st_out
+ st_case_1:
+ switch (m.data)[(m.p)] {
+ case 85:
+ goto tr1
+ case 117:
+ goto tr1
+ }
+ goto tr0
+ tr0:
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr3:
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr6:
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr41:
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr44:
+ m.err = fmt.Errorf(errHex, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr50:
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ tr52:
+ m.err = fmt.Errorf(errNoUrnWithinID, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ goto st0
+ st_case_0:
+ st0:
+ m.cs = 0
+ goto _out
+ tr1:
+ m.pb = m.p
+
+ goto st2
+ st2:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof2
+ }
+ st_case_2:
+ switch (m.data)[(m.p)] {
+ case 82:
+ goto st3
+ case 114:
+ goto st3
+ }
+ goto tr0
+ st3:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof3
+ }
+ st_case_3:
+ switch (m.data)[(m.p)] {
+ case 78:
+ goto st4
+ case 110:
+ goto st4
+ }
+ goto tr3
+ st4:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof4
+ }
+ st_case_4:
+ if (m.data)[(m.p)] == 58 {
+ goto tr5
+ }
+ goto tr0
+ tr5:
+ output.prefix = string(m.text())
+
+ goto st5
+ st5:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof5
+ }
+ st_case_5:
+ switch (m.data)[(m.p)] {
+ case 85:
+ goto tr8
+ case 117:
+ goto tr8
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto tr7
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto tr7
+ }
+ default:
+ goto tr7
+ }
+ goto tr6
+ tr7:
+ m.pb = m.p
+
+ goto st6
+ st6:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof6
+ }
+ st_case_6:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st7
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st7
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st7
+ }
+ default:
+ goto st7
+ }
+ goto tr6
+ st7:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof7
+ }
+ st_case_7:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st8
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st8
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st8
+ }
+ default:
+ goto st8
+ }
+ goto tr6
+ st8:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof8
+ }
+ st_case_8:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st9
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st9
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st9
+ }
+ default:
+ goto st9
+ }
+ goto tr6
+ st9:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof9
+ }
+ st_case_9:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st10
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st10
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st10
+ }
+ default:
+ goto st10
+ }
+ goto tr6
+ st10:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof10
+ }
+ st_case_10:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st11
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st11
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st11
+ }
+ default:
+ goto st11
+ }
+ goto tr6
+ st11:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof11
+ }
+ st_case_11:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st12
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st12
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st12
+ }
+ default:
+ goto st12
+ }
+ goto tr6
+ st12:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof12
+ }
+ st_case_12:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st13
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st13
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st13
+ }
+ default:
+ goto st13
+ }
+ goto tr6
+ st13:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof13
+ }
+ st_case_13:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st14
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st14
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st14
+ }
+ default:
+ goto st14
+ }
+ goto tr6
+ st14:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof14
+ }
+ st_case_14:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st15
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st15
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st15
+ }
+ default:
+ goto st15
+ }
+ goto tr6
+ st15:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof15
+ }
+ st_case_15:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st16
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st16
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st16
+ }
+ default:
+ goto st16
+ }
+ goto tr6
+ st16:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof16
+ }
+ st_case_16:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st17
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st17
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st17
+ }
+ default:
+ goto st17
+ }
+ goto tr6
+ st17:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof17
+ }
+ st_case_17:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st18
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st18
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st18
+ }
+ default:
+ goto st18
+ }
+ goto tr6
+ st18:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof18
+ }
+ st_case_18:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st19
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st19
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st19
+ }
+ default:
+ goto st19
+ }
+ goto tr6
+ st19:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof19
+ }
+ st_case_19:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st20
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st20
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st20
+ }
+ default:
+ goto st20
+ }
+ goto tr6
+ st20:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof20
+ }
+ st_case_20:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st21
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st21
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st21
+ }
+ default:
+ goto st21
+ }
+ goto tr6
+ st21:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof21
+ }
+ st_case_21:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st22
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st22
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st22
+ }
+ default:
+ goto st22
+ }
+ goto tr6
+ st22:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof22
+ }
+ st_case_22:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st23
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st23
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st23
+ }
+ default:
+ goto st23
+ }
+ goto tr6
+ st23:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof23
+ }
+ st_case_23:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st24
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st24
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st24
+ }
+ default:
+ goto st24
+ }
+ goto tr6
+ st24:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof24
+ }
+ st_case_24:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st25
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st25
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st25
+ }
+ default:
+ goto st25
+ }
+ goto tr6
+ st25:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof25
+ }
+ st_case_25:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st26
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st26
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st26
+ }
+ default:
+ goto st26
+ }
+ goto tr6
+ st26:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof26
+ }
+ st_case_26:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st27
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st27
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st27
+ }
+ default:
+ goto st27
+ }
+ goto tr6
+ st27:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof27
+ }
+ st_case_27:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st28
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st28
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st28
+ }
+ default:
+ goto st28
+ }
+ goto tr6
+ st28:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof28
+ }
+ st_case_28:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st29
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st29
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st29
+ }
+ default:
+ goto st29
+ }
+ goto tr6
+ st29:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof29
+ }
+ st_case_29:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st30
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st30
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st30
+ }
+ default:
+ goto st30
+ }
+ goto tr6
+ st30:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof30
+ }
+ st_case_30:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st31
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st31
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st31
+ }
+ default:
+ goto st31
+ }
+ goto tr6
+ st31:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof31
+ }
+ st_case_31:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st32
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st32
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st32
+ }
+ default:
+ goto st32
+ }
+ goto tr6
+ st32:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof32
+ }
+ st_case_32:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st33
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st33
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st33
+ }
+ default:
+ goto st33
+ }
+ goto tr6
+ st33:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof33
+ }
+ st_case_33:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st34
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st34
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st34
+ }
+ default:
+ goto st34
+ }
+ goto tr6
+ st34:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof34
+ }
+ st_case_34:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st35
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st35
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st35
+ }
+ default:
+ goto st35
+ }
+ goto tr6
+ st35:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof35
+ }
+ st_case_35:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st36
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st36
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st36
+ }
+ default:
+ goto st36
+ }
+ goto tr6
+ st36:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof36
+ }
+ st_case_36:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st37
+ case 58:
+ goto tr10
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st37
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st37
+ }
+ default:
+ goto st37
+ }
+ goto tr6
+ st37:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof37
+ }
+ st_case_37:
+ if (m.data)[(m.p)] == 58 {
+ goto tr10
+ }
+ goto tr6
+ tr10:
+ output.ID = string(m.text())
+
+ goto st38
+ st38:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof38
+ }
+ st_case_38:
+ switch (m.data)[(m.p)] {
+ case 33:
+ goto tr42
+ case 36:
+ goto tr42
+ case 37:
+ goto tr43
+ case 61:
+ goto tr42
+ case 95:
+ goto tr42
+ }
+ switch {
+ case (m.data)[(m.p)] < 48:
+ if 39 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 46 {
+ goto tr42
+ }
+ case (m.data)[(m.p)] > 59:
+ switch {
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto tr42
+ }
+ case (m.data)[(m.p)] >= 64:
+ goto tr42
+ }
+ default:
+ goto tr42
+ }
+ goto tr41
+ tr42:
+ m.pb = m.p
+
+ goto st44
+ st44:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof44
+ }
+ st_case_44:
+ switch (m.data)[(m.p)] {
+ case 33:
+ goto st44
+ case 36:
+ goto st44
+ case 37:
+ goto st39
+ case 61:
+ goto st44
+ case 95:
+ goto st44
+ }
+ switch {
+ case (m.data)[(m.p)] < 48:
+ if 39 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 46 {
+ goto st44
+ }
+ case (m.data)[(m.p)] > 59:
+ switch {
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st44
+ }
+ case (m.data)[(m.p)] >= 64:
+ goto st44
+ }
+ default:
+ goto st44
+ }
+ goto tr41
+ tr43:
+ m.pb = m.p
+
+ goto st39
+ st39:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof39
+ }
+ st_case_39:
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st40
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st40
+ }
+ default:
+ goto tr46
+ }
+ goto tr44
+ tr46:
+ m.tolower = append(m.tolower, m.p-m.pb)
+
+ goto st40
+ st40:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof40
+ }
+ st_case_40:
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st45
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st45
+ }
+ default:
+ goto tr48
+ }
+ goto tr44
+ tr48:
+ m.tolower = append(m.tolower, m.p-m.pb)
+
+ goto st45
+ st45:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof45
+ }
+ st_case_45:
+ switch (m.data)[(m.p)] {
+ case 33:
+ goto st44
+ case 36:
+ goto st44
+ case 37:
+ goto st39
+ case 61:
+ goto st44
+ case 95:
+ goto st44
+ }
+ switch {
+ case (m.data)[(m.p)] < 48:
+ if 39 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 46 {
+ goto st44
+ }
+ case (m.data)[(m.p)] > 59:
+ switch {
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st44
+ }
+ case (m.data)[(m.p)] >= 64:
+ goto st44
+ }
+ default:
+ goto st44
+ }
+ goto tr44
+ tr8:
+ m.pb = m.p
+
+ goto st41
+ st41:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof41
+ }
+ st_case_41:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st7
+ case 58:
+ goto tr10
+ case 82:
+ goto st42
+ case 114:
+ goto st42
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st7
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st7
+ }
+ default:
+ goto st7
+ }
+ goto tr6
+ st42:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof42
+ }
+ st_case_42:
+ switch (m.data)[(m.p)] {
+ case 45:
+ goto st8
+ case 58:
+ goto tr10
+ case 78:
+ goto st43
+ case 110:
+ goto st43
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st8
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st8
+ }
+ default:
+ goto st8
+ }
+ goto tr50
+ st43:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof43
+ }
+ st_case_43:
+ if (m.data)[(m.p)] == 45 {
+ goto st9
+ }
+ switch {
+ case (m.data)[(m.p)] < 65:
+ if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+ goto st9
+ }
+ case (m.data)[(m.p)] > 90:
+ if 97 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 122 {
+ goto st9
+ }
+ default:
+ goto st9
+ }
+ goto tr52
+ st46:
+ if (m.p)++; (m.p) == (m.pe) {
+ goto _test_eof46
+ }
+ st_case_46:
+ switch (m.data)[(m.p)] {
+ case 10:
+ goto st0
+ case 13:
+ goto st0
+ }
+ goto st46
+ st_out:
+ _test_eof2:
+ m.cs = 2
+ goto _test_eof
+ _test_eof3:
+ m.cs = 3
+ goto _test_eof
+ _test_eof4:
+ m.cs = 4
+ goto _test_eof
+ _test_eof5:
+ m.cs = 5
+ goto _test_eof
+ _test_eof6:
+ m.cs = 6
+ goto _test_eof
+ _test_eof7:
+ m.cs = 7
+ goto _test_eof
+ _test_eof8:
+ m.cs = 8
+ goto _test_eof
+ _test_eof9:
+ m.cs = 9
+ goto _test_eof
+ _test_eof10:
+ m.cs = 10
+ goto _test_eof
+ _test_eof11:
+ m.cs = 11
+ goto _test_eof
+ _test_eof12:
+ m.cs = 12
+ goto _test_eof
+ _test_eof13:
+ m.cs = 13
+ goto _test_eof
+ _test_eof14:
+ m.cs = 14
+ goto _test_eof
+ _test_eof15:
+ m.cs = 15
+ goto _test_eof
+ _test_eof16:
+ m.cs = 16
+ goto _test_eof
+ _test_eof17:
+ m.cs = 17
+ goto _test_eof
+ _test_eof18:
+ m.cs = 18
+ goto _test_eof
+ _test_eof19:
+ m.cs = 19
+ goto _test_eof
+ _test_eof20:
+ m.cs = 20
+ goto _test_eof
+ _test_eof21:
+ m.cs = 21
+ goto _test_eof
+ _test_eof22:
+ m.cs = 22
+ goto _test_eof
+ _test_eof23:
+ m.cs = 23
+ goto _test_eof
+ _test_eof24:
+ m.cs = 24
+ goto _test_eof
+ _test_eof25:
+ m.cs = 25
+ goto _test_eof
+ _test_eof26:
+ m.cs = 26
+ goto _test_eof
+ _test_eof27:
+ m.cs = 27
+ goto _test_eof
+ _test_eof28:
+ m.cs = 28
+ goto _test_eof
+ _test_eof29:
+ m.cs = 29
+ goto _test_eof
+ _test_eof30:
+ m.cs = 30
+ goto _test_eof
+ _test_eof31:
+ m.cs = 31
+ goto _test_eof
+ _test_eof32:
+ m.cs = 32
+ goto _test_eof
+ _test_eof33:
+ m.cs = 33
+ goto _test_eof
+ _test_eof34:
+ m.cs = 34
+ goto _test_eof
+ _test_eof35:
+ m.cs = 35
+ goto _test_eof
+ _test_eof36:
+ m.cs = 36
+ goto _test_eof
+ _test_eof37:
+ m.cs = 37
+ goto _test_eof
+ _test_eof38:
+ m.cs = 38
+ goto _test_eof
+ _test_eof44:
+ m.cs = 44
+ goto _test_eof
+ _test_eof39:
+ m.cs = 39
+ goto _test_eof
+ _test_eof40:
+ m.cs = 40
+ goto _test_eof
+ _test_eof45:
+ m.cs = 45
+ goto _test_eof
+ _test_eof41:
+ m.cs = 41
+ goto _test_eof
+ _test_eof42:
+ m.cs = 42
+ goto _test_eof
+ _test_eof43:
+ m.cs = 43
+ goto _test_eof
+ _test_eof46:
+ m.cs = 46
+ goto _test_eof
+
+ _test_eof:
+ {
+ }
+ if (m.p) == (m.eof) {
+ switch m.cs {
+ case 44, 45:
+ raw := m.text()
+ output.SS = string(raw)
+ // Iterate upper letters lowering them
+ for _, i := range m.tolower {
+ raw[i] = raw[i] + 32
+ }
+ output.norm = string(raw)
+
+ case 1, 2, 4:
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 3:
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 41:
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 38:
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 42:
+ m.err = fmt.Errorf(errPrefix, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 43:
+ m.err = fmt.Errorf(errNoUrnWithinID, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ case 39, 40:
+ m.err = fmt.Errorf(errHex, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ m.err = fmt.Errorf(errParse, m.p)
+ (m.p)--
+
+ {
+ goto st46
+ }
+
+ }
+ }
+
+ _out:
+ {
+ }
+ }
+
+ if m.cs < first_final || m.cs == en_fail {
+ return nil, m.err
+ }
+
+ return output, nil
+}
diff --git a/backend/vendor/github.com/leodido/go-urn/machine.go.rl b/backend/vendor/github.com/leodido/go-urn/machine.go.rl
new file mode 100644
index 00000000..3bc05a65
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/machine.go.rl
@@ -0,0 +1,159 @@
+package urn
+
+import (
+ "fmt"
+)
+
+var (
+ errPrefix = "expecting the prefix to be the \"urn\" string (whatever case) [col %d]"
+ errIdentifier = "expecting the identifier to be string (1..31 alnum chars, also containing dashes but not at its start) [col %d]"
+ errSpecificString = "expecting the specific string to be a string containing alnum, hex, or others ([()+,-.:=@;$_!*']) chars [col %d]"
+ errNoUrnWithinID = "expecting the identifier to not contain the \"urn\" reserved string [col %d]"
+ errHex = "expecting the specific string hex chars to be well-formed (%%alnum{2}) [col %d]"
+ errParse = "parsing error [col %d]"
+)
+
+%%{
+machine urn;
+
+# unsigned alphabet
+alphtype uint8;
+
+action mark {
+ m.pb = m.p
+}
+
+action tolower {
+ m.tolower = append(m.tolower, m.p - m.pb)
+}
+
+action set_pre {
+ output.prefix = string(m.text())
+}
+
+action set_nid {
+ output.ID = string(m.text())
+}
+
+action set_nss {
+ raw := m.text()
+ output.SS = string(raw)
+ // Iterate upper letters lowering them
+ for _, i := range m.tolower {
+ raw[i] = raw[i] + 32
+ }
+ output.norm = string(raw)
+}
+
+action err_pre {
+ m.err = fmt.Errorf(errPrefix, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_nid {
+ m.err = fmt.Errorf(errIdentifier, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_nss {
+ m.err = fmt.Errorf(errSpecificString, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_urn {
+ m.err = fmt.Errorf(errNoUrnWithinID, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_hex {
+ m.err = fmt.Errorf(errHex, m.p)
+ fhold;
+ fgoto fail;
+}
+
+action err_parse {
+ m.err = fmt.Errorf(errParse, m.p)
+ fhold;
+ fgoto fail;
+}
+
+pre = ([uU][rR][nN] @err(err_pre)) >mark %set_pre;
+
+nid = (alnum >mark (alnum | '-'){0,31}) %set_nid;
+
+hex = '%' (digit | lower | upper >tolower){2} $err(err_hex);
+
+sss = (alnum | [()+,\-.:=@;$_!*']);
+
+nss = (sss | hex)+ $err(err_nss);
+
+fail := (any - [\n\r])* @err{ fgoto main; };
+
+main := (pre ':' (nid - pre %err(err_urn)) $err(err_nid) ':' nss >mark %set_nss) $err(err_parse);
+
+}%%
+
+%% write data noerror noprefix;
+
+// Machine is the interface representing the FSM
+type Machine interface {
+ Error() error
+ Parse(input []byte) (*URN, error)
+}
+
+type machine struct {
+ data []byte
+ cs int
+ p, pe, eof, pb int
+ err error
+ tolower []int
+}
+
+// NewMachine creates a new FSM able to parse RFC 2141 strings.
+func NewMachine() Machine {
+ m := &machine{}
+
+ %% access m.;
+ %% variable p m.p;
+ %% variable pe m.pe;
+ %% variable eof m.eof;
+ %% variable data m.data;
+
+ return m
+}
+
+// Err returns the error that occurred on the last call to Parse.
+//
+// If the result is nil, then the line was parsed successfully.
+func (m *machine) Error() error {
+ return m.err
+}
+
+func (m *machine) text() []byte {
+ return m.data[m.pb:m.p]
+}
+
+// Parse parses the input byte array as a RFC 2141 string.
+func (m *machine) Parse(input []byte) (*URN, error) {
+ m.data = input
+ m.p = 0
+ m.pb = 0
+ m.pe = len(input)
+ m.eof = len(input)
+ m.err = nil
+ m.tolower = []int{}
+ output := &URN{}
+
+ %% write init;
+ %% write exec;
+
+ if m.cs < first_final || m.cs == en_fail {
+ return nil, m.err
+ }
+
+ return output, nil
+}
diff --git a/backend/vendor/github.com/leodido/go-urn/makefile b/backend/vendor/github.com/leodido/go-urn/makefile
new file mode 100644
index 00000000..362137ad
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/makefile
@@ -0,0 +1,17 @@
+SHELL := /bin/bash
+
+machine.go: machine.go.rl
+ ragel -Z -G2 -e -o $@ $<
+ @gofmt -w -s $@
+ @sed -i '/^\/\/line/d' $@
+
+.PHONY: build
+build: machine.go
+
+.PHONY: bench
+bench: *_test.go machine.go
+ go test -bench=. -benchmem -benchtime=5s ./...
+
+.PHONY: tests
+tests: *_test.go machine.go
+ go test -race -timeout 10s -coverprofile=coverage.out -covermode=atomic -v ./...
\ No newline at end of file
diff --git a/backend/vendor/github.com/leodido/go-urn/urn.go b/backend/vendor/github.com/leodido/go-urn/urn.go
new file mode 100644
index 00000000..b903b7b3
--- /dev/null
+++ b/backend/vendor/github.com/leodido/go-urn/urn.go
@@ -0,0 +1,63 @@
+package urn
+
+import (
+ "strings"
+)
+
+// URN represents a Uniform Resource Name.
+//
+// The general form represented is:
+//
+// urn:<id>:<ss>
+//
+// Details at https://tools.ietf.org/html/rfc2141.
+type URN struct {
+ prefix string // Static prefix. Equal to "urn" when empty.
+ ID string // Namespace identifier
+ SS string // Namespace specific string
+ norm string // Normalized namespace specific string
+}
+
+// Normalize turns the receiving URN into its norm version.
+//
+// Which means: lowercase prefix, lowercase namespace identifier, and the namespace specific string left unchanged except for hex escape tokens, which are lowercased.
+func (u *URN) Normalize() *URN {
+ return &URN{
+ prefix: "urn",
+ ID: strings.ToLower(u.ID),
+ SS: u.norm,
+ }
+}
+
+// Equal checks the lexical equivalence of the current URN with another one.
+func (u *URN) Equal(x *URN) bool {
+ return *u.Normalize() == *x.Normalize()
+}
+
+// String reassembles the URN into a valid URN string.
+//
+// This requires both ID and SS fields to be non-empty.
+// Otherwise it returns an empty string.
+//
+// Default URN prefix is "urn".
+func (u *URN) String() string {
+ var res string
+ if u.ID != "" && u.SS != "" {
+ if u.prefix == "" {
+ res += "urn"
+ }
+ res += u.prefix + ":" + u.ID + ":" + u.SS
+ }
+
+ return res
+}
+
+// Parse creates a URN instance from a byte slice that matches the RFC 2141 URN syntax.
+func Parse(u []byte) (*URN, bool) {
+ urn, err := NewMachine().Parse(u)
+ if err != nil {
+ return nil, false
+ }
+
+ return urn, true
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/.gitignore b/backend/vendor/github.com/smartystreets/assertions/.gitignore
new file mode 100644
index 00000000..07d3c71c
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/.gitignore
@@ -0,0 +1,5 @@
+.DS_Store
+Thumbs.db
+*.iml
+/.idea
+coverage.out
diff --git a/backend/vendor/github.com/smartystreets/assertions/.travis.yml b/backend/vendor/github.com/smartystreets/assertions/.travis.yml
new file mode 100644
index 00000000..72df752f
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/.travis.yml
@@ -0,0 +1,11 @@
+language: go
+
+go:
+ - 1.x
+
+install:
+ - go get -t ./...
+
+script: go test ./... -v
+
+sudo: false
diff --git a/backend/vendor/github.com/smartystreets/assertions/CONTRIBUTING.md b/backend/vendor/github.com/smartystreets/assertions/CONTRIBUTING.md
new file mode 100644
index 00000000..1820ecb3
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/CONTRIBUTING.md
@@ -0,0 +1,12 @@
+# Contributing
+
+In general, the code posted to the [SmartyStreets github organization](https://github.com/smartystreets) is created to solve specific problems at SmartyStreets that are ancillary to our core products in the address verification industry and may or may not be useful to other organizations or developers. Our reason for posting said code isn't necessarily to solicit feedback or contributions from the community but more as a showcase of some of the approaches to solving problems we have adopted.
+
+Having stated that, we do consider issues raised by other githubbers as well as contributions submitted via pull requests. When submitting such a pull request, please follow these guidelines:
+
+- _Look before you leap:_ If the changes you plan to make are significant, it's in everyone's best interest for you to discuss them with a SmartyStreets team member prior to opening a pull request.
+- _License and ownership:_ If modifying the `LICENSE.md` file, limit your changes to fixing typographical mistakes. Do NOT modify the actual terms in the license or the copyright by **SmartyStreets, LLC**. Code submitted to SmartyStreets projects becomes property of SmartyStreets and must be compatible with the associated license.
+- _Testing:_ If the code you are submitting resides in packages/modules covered by automated tests, be sure to add passing tests that cover your changes and assert expected behavior and state. Submit the additional test cases as part of your change set.
+- _Style:_ Match your approach to **naming** and **formatting** with the surrounding code. Basically, the code you submit shouldn't stand out.
+ - "Naming" refers to such constructs as variables, methods, functions, classes, structs, interfaces, packages, modules, directories, files, etc...
+ - "Formatting" refers to such constructs as whitespace, horizontal line length, vertical function length, vertical file length, indentation, curly braces, etc...
diff --git a/backend/vendor/github.com/smartystreets/assertions/LICENSE.md b/backend/vendor/github.com/smartystreets/assertions/LICENSE.md
new file mode 100644
index 00000000..8ea6f945
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/LICENSE.md
@@ -0,0 +1,23 @@
+Copyright (c) 2016 SmartyStreets, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+NOTE: Various optional and subordinate components carry their own licensing
+requirements and restrictions. Use of those components is subject to the terms
+and conditions outlined in the respective license of each component.
diff --git a/backend/vendor/github.com/smartystreets/assertions/README.md b/backend/vendor/github.com/smartystreets/assertions/README.md
new file mode 100644
index 00000000..208a4040
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/README.md
@@ -0,0 +1,619 @@
+# assertions
+--
+ import "github.com/smartystreets/assertions"
+
+Package assertions contains the implementations for all assertions which are
+referenced in goconvey's `convey` package
+(github.com/smartystreets/goconvey/convey) and gunit
+(github.com/smartystreets/gunit) for use with the So(...) method. They can also
+be used in traditional Go test functions and even in applications.
+
+https://smartystreets.com
+
+Many of the assertions lean heavily on work done by Aaron Jacobs in his
+excellent oglematchers library. (https://github.com/jacobsa/oglematchers) The
+ShouldResemble assertion leans heavily on work done by Daniel Jacques in his
+very helpful go-render library. (https://github.com/luci/go-render)
+
+## Usage
+
+#### func GoConveyMode
+
+```go
+func GoConveyMode(yes bool)
+```
+GoConveyMode provides control over JSON serialization of failures. When using
+the assertions in this package from the convey package JSON results are very
+helpful and can be rendered in a DIFF view. In that case, this function will be
+called with a true value to enable the JSON serialization. By default, the
+assertions in this package will not serialize a JSON result, making standalone
+usage more convenient.
+
+#### func ShouldAlmostEqual
+
+```go
+func ShouldAlmostEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldAlmostEqual makes sure that two parameters are close enough to being
+equal. The acceptable delta may be specified with a third argument, or a very
+small default delta will be used.
+
+#### func ShouldBeBetween
+
+```go
+func ShouldBeBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldBeBetween receives exactly three parameters: an actual value, a lower
+bound, and an upper bound. It ensures that the actual value is between both
+bounds (but not equal to either of them).
+
+#### func ShouldBeBetweenOrEqual
+
+```go
+func ShouldBeBetweenOrEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldBeBetweenOrEqual receives exactly three parameters: an actual value, a
+lower bound, and an upper bound. It ensures that the actual value is between
+both bounds or equal to one of them.
+
+#### func ShouldBeBlank
+
+```go
+func ShouldBeBlank(actual interface{}, expected ...interface{}) string
+```
+ShouldBeBlank receives exactly 1 string parameter and ensures that it is equal
+to "".
+
+#### func ShouldBeChronological
+
+```go
+func ShouldBeChronological(actual interface{}, expected ...interface{}) string
+```
+ShouldBeChronological receives a []time.Time slice and asserts that they are in
+chronological order starting with the first time.Time as the earliest.
+
+#### func ShouldBeEmpty
+
+```go
+func ShouldBeEmpty(actual interface{}, expected ...interface{}) string
+```
+ShouldBeEmpty receives a single parameter (actual) and determines whether or not
+calling len(actual) would return `0`. It obeys the rules specified by the len
+function for determining length: http://golang.org/pkg/builtin/#len
+
+#### func ShouldBeError
+
+```go
+func ShouldBeError(actual interface{}, expected ...interface{}) string
+```
+ShouldBeError asserts that the first argument implements the error interface. It
+also compares the first argument against the second argument if provided (which
+must be an error message string or another error value).
+
+#### func ShouldBeFalse
+
+```go
+func ShouldBeFalse(actual interface{}, expected ...interface{}) string
+```
+ShouldBeFalse receives a single parameter and ensures that it is false.
+
+#### func ShouldBeGreaterThan
+
+```go
+func ShouldBeGreaterThan(actual interface{}, expected ...interface{}) string
+```
+ShouldBeGreaterThan receives exactly two parameters and ensures that the first
+is greater than the second.
+
+#### func ShouldBeGreaterThanOrEqualTo
+
+```go
+func ShouldBeGreaterThanOrEqualTo(actual interface{}, expected ...interface{}) string
+```
+ShouldBeGreaterThanOrEqualTo receives exactly two parameters and ensures that
+the first is greater than or equal to the second.
+
+#### func ShouldBeIn
+
+```go
+func ShouldBeIn(actual interface{}, expected ...interface{}) string
+```
+ShouldBeIn receives at least 2 parameters. The first is a proposed member of the
+collection that is passed in either as the second parameter, or of the
+collection that is comprised of all the remaining parameters. This assertion
+ensures that the proposed member is in the collection (using ShouldEqual).
+
+#### func ShouldBeLessThan
+
+```go
+func ShouldBeLessThan(actual interface{}, expected ...interface{}) string
+```
+ShouldBeLessThan receives exactly two parameters and ensures that the first is
+less than the second.
+
+#### func ShouldBeLessThanOrEqualTo
+
+```go
+func ShouldBeLessThanOrEqualTo(actual interface{}, expected ...interface{}) string
+```
+ShouldBeLessThan receives exactly two parameters and ensures that the first is
+less than or equal to the second.
+
+#### func ShouldBeNil
+
+```go
+func ShouldBeNil(actual interface{}, expected ...interface{}) string
+```
+ShouldBeNil receives a single parameter and ensures that it is nil.
+
+#### func ShouldBeTrue
+
+```go
+func ShouldBeTrue(actual interface{}, expected ...interface{}) string
+```
+ShouldBeTrue receives a single parameter and ensures that it is true.
+
+#### func ShouldBeZeroValue
+
+```go
+func ShouldBeZeroValue(actual interface{}, expected ...interface{}) string
+```
+ShouldBeZeroValue receives a single parameter and ensures that it is the Go
+equivalent of the default value, or "zero" value.
+
+#### func ShouldContain
+
+```go
+func ShouldContain(actual interface{}, expected ...interface{}) string
+```
+ShouldContain receives exactly two parameters. The first is a slice and the
+second is a proposed member. Membership is determined using ShouldEqual.
+
+#### func ShouldContainKey
+
+```go
+func ShouldContainKey(actual interface{}, expected ...interface{}) string
+```
+ShouldContainKey receives exactly two parameters. The first is a map and the
+second is a proposed key. Keys are compared with a simple '=='.
+
+#### func ShouldContainSubstring
+
+```go
+func ShouldContainSubstring(actual interface{}, expected ...interface{}) string
+```
+ShouldContainSubstring receives exactly 2 string parameters and ensures that the
+first contains the second as a substring.
+
+#### func ShouldEndWith
+
+```go
+func ShouldEndWith(actual interface{}, expected ...interface{}) string
+```
+ShouldEndWith receives exactly 2 string parameters and ensures that the first
+ends with the second.
+
+#### func ShouldEqual
+
+```go
+func ShouldEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldEqual receives exactly two parameters and does an equality check using the
+following semantics: 1. If the expected and actual values implement an Equal
+method in the form `func (this T) Equal(that T) bool` then call the method. If
+true, they are equal. 2. The expected and actual values are judged equal or not
+by oglematchers.Equals.
+
+#### func ShouldEqualJSON
+
+```go
+func ShouldEqualJSON(actual interface{}, expected ...interface{}) string
+```
+ShouldEqualJSON receives exactly two parameters and does an equality check by
+marshalling to JSON
+
+#### func ShouldEqualTrimSpace
+
+```go
+func ShouldEqualTrimSpace(actual interface{}, expected ...interface{}) string
+```
+ShouldEqualTrimSpace receives exactly 2 string parameters and ensures that the
+first is equal to the second after removing all leading and trailing whitespace
+using strings.TrimSpace(first).
+
+#### func ShouldEqualWithout
+
+```go
+func ShouldEqualWithout(actual interface{}, expected ...interface{}) string
+```
+ShouldEqualWithout receives exactly 3 string parameters and ensures that the
+first is equal to the second after removing all instances of the third from the
+first using strings.Replace(first, third, "", -1).
+
+#### func ShouldHappenAfter
+
+```go
+func ShouldHappenAfter(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenAfter receives exactly 2 time.Time arguments and asserts that the
+first happens after the second.
+
+#### func ShouldHappenBefore
+
+```go
+func ShouldHappenBefore(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenBefore receives exactly 2 time.Time arguments and asserts that the
+first happens before the second.
+
+#### func ShouldHappenBetween
+
+```go
+func ShouldHappenBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenBetween receives exactly 3 time.Time arguments and asserts that the
+first happens between (not on) the second and third.
+
+#### func ShouldHappenOnOrAfter
+
+```go
+func ShouldHappenOnOrAfter(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenOnOrAfter receives exactly 2 time.Time arguments and asserts that
+the first happens on or after the second.
+
+#### func ShouldHappenOnOrBefore
+
+```go
+func ShouldHappenOnOrBefore(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenOnOrBefore receives exactly 2 time.Time arguments and asserts that
+the first happens on or before the second.
+
+#### func ShouldHappenOnOrBetween
+
+```go
+func ShouldHappenOnOrBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that
+the first happens between or on the second and third.
+
+#### func ShouldHappenWithin
+
+```go
+func ShouldHappenWithin(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenWithin receives a time.Time, a time.Duration, and a time.Time (3
+arguments) and asserts that the first time.Time happens within or on the
+duration specified relative to the other time.Time.
+
+#### func ShouldHaveLength
+
+```go
+func ShouldHaveLength(actual interface{}, expected ...interface{}) string
+```
+ShouldHaveLength receives 2 parameters. The first is a collection to check the
+length of, the second being the expected length. It obeys the rules specified by
+the len function for determining length: http://golang.org/pkg/builtin/#len
+
+#### func ShouldHaveSameTypeAs
+
+```go
+func ShouldHaveSameTypeAs(actual interface{}, expected ...interface{}) string
+```
+ShouldHaveSameTypeAs receives exactly two parameters and compares their
+underlying types for equality.
+
+#### func ShouldImplement
+
+```go
+func ShouldImplement(actual interface{}, expectedList ...interface{}) string
+```
+ShouldImplement receives exactly two parameters and ensures that the first
+implements the interface type of the second.
+
+#### func ShouldNotAlmostEqual
+
+```go
+func ShouldNotAlmostEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldNotAlmostEqual is the inverse of ShouldAlmostEqual
+
+#### func ShouldNotBeBetween
+
+```go
+func ShouldNotBeBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeBetween receives exactly three parameters: an actual value, a lower
+bound, and an upper bound. It ensures that the actual value is NOT between both
+bounds.
+
+#### func ShouldNotBeBetweenOrEqual
+
+```go
+func ShouldNotBeBetweenOrEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeBetweenOrEqual receives exactly three parameters: an actual value, a
+lower bound, and an upper bound. It ensures that the actual value is NOT
+between the bounds nor equal to either of them.
+
+#### func ShouldNotBeBlank
+
+```go
+func ShouldNotBeBlank(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeBlank receives exactly 1 string parameter and ensures that it is
+NOT equal to "".
+
+#### func ShouldNotBeChronological
+
+```go
+func ShouldNotBeChronological(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeChronological receives a []time.Time slice and asserts that they are
+NOT in chronological order.
+
+#### func ShouldNotBeEmpty
+
+```go
+func ShouldNotBeEmpty(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeEmpty receives a single parameter (actual) and determines whether or
+not calling len(actual) would return a value greater than zero. It obeys the
+rules specified by the `len` function for determining length:
+http://golang.org/pkg/builtin/#len
+
+#### func ShouldNotBeIn
+
+```go
+func ShouldNotBeIn(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeIn receives at least 2 parameters. The first is a proposed member of
+the collection that is passed in either as the second parameter, or of the
+collection that is comprised of all the remaining parameters. This assertion
+ensures that the proposed member is NOT in the collection (using ShouldEqual).
+
+#### func ShouldNotBeNil
+
+```go
+func ShouldNotBeNil(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeNil receives a single parameter and ensures that it is not nil.
+
+#### func ShouldNotBeZeroValue
+
+```go
+func ShouldNotBeZeroValue(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeZeroValue receives a single parameter and ensures that it is NOT the Go
+equivalent of the default value, or "zero" value.
+
+#### func ShouldNotContain
+
+```go
+func ShouldNotContain(actual interface{}, expected ...interface{}) string
+```
+ShouldNotContain receives exactly two parameters. The first is a slice and the
+second is a proposed member. Membership is determined using ShouldEqual.
+
+#### func ShouldNotContainKey
+
+```go
+func ShouldNotContainKey(actual interface{}, expected ...interface{}) string
+```
+ShouldNotContainKey receives exactly two parameters. The first is a map and the
+second is a proposed absent key. Keys are compared with a simple '=='.
+
+#### func ShouldNotContainSubstring
+
+```go
+func ShouldNotContainSubstring(actual interface{}, expected ...interface{}) string
+```
+ShouldNotContainSubstring receives exactly 2 string parameters and ensures that
+the first does NOT contain the second as a substring.
+
+#### func ShouldNotEndWith
+
+```go
+func ShouldNotEndWith(actual interface{}, expected ...interface{}) string
+```
+ShouldNotEndWith receives exactly 2 string parameters and ensures that the first
+does not end with the second.
+
+#### func ShouldNotEqual
+
+```go
+func ShouldNotEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldNotEqual receives exactly two parameters and does an inequality check. See
+ShouldEqual for details on how equality is determined.
+
+#### func ShouldNotHappenOnOrBetween
+
+```go
+func ShouldNotHappenOnOrBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldNotHappenOnOrBetween receives exactly 3 time.Time arguments and asserts
+that the first does NOT happen between or on the second or third.
+
+#### func ShouldNotHappenWithin
+
+```go
+func ShouldNotHappenWithin(actual interface{}, expected ...interface{}) string
+```
+ShouldNotHappenWithin receives a time.Time, a time.Duration, and a time.Time (3
+arguments) and asserts that the first time.Time does NOT happen within or on the
+duration specified relative to the other time.Time.
+
+#### func ShouldNotHaveSameTypeAs
+
+```go
+func ShouldNotHaveSameTypeAs(actual interface{}, expected ...interface{}) string
+```
+ShouldNotHaveSameTypeAs receives exactly two parameters and compares their
+underlying types for inequality.
+
+#### func ShouldNotImplement
+
+```go
+func ShouldNotImplement(actual interface{}, expectedList ...interface{}) string
+```
+ShouldNotImplement receives exactly two parameters and ensures that the first
+does NOT implement the interface type of the second.
+
+#### func ShouldNotPanic
+
+```go
+func ShouldNotPanic(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldNotPanic receives a void, niladic function and expects to execute the
+function without any panic.
+
+#### func ShouldNotPanicWith
+
+```go
+func ShouldNotPanicWith(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldNotPanicWith receives a void, niladic function and expects to recover a
+panic whose content differs from the second argument.
+
+#### func ShouldNotPointTo
+
+```go
+func ShouldNotPointTo(actual interface{}, expected ...interface{}) string
+```
+ShouldNotPointTo receives exactly two parameters and checks to see that they
+point to different addresses.
+
+#### func ShouldNotResemble
+
+```go
+func ShouldNotResemble(actual interface{}, expected ...interface{}) string
+```
+ShouldNotResemble receives exactly two parameters and does an inverse deep equal
+check (see reflect.DeepEqual)
+
+#### func ShouldNotStartWith
+
+```go
+func ShouldNotStartWith(actual interface{}, expected ...interface{}) string
+```
+ShouldNotStartWith receives exactly 2 string parameters and ensures that the
+first does not start with the second.
+
+#### func ShouldPanic
+
+```go
+func ShouldPanic(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldPanic receives a void, niladic function and expects to recover a panic.
+
+#### func ShouldPanicWith
+
+```go
+func ShouldPanicWith(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldPanicWith receives a void, niladic function and expects to recover a panic
+with the second argument as the content.
+
+#### func ShouldPointTo
+
+```go
+func ShouldPointTo(actual interface{}, expected ...interface{}) string
+```
+ShouldPointTo receives exactly two parameters and checks to see that they point
+to the same address.
+
+#### func ShouldResemble
+
+```go
+func ShouldResemble(actual interface{}, expected ...interface{}) string
+```
+ShouldResemble receives exactly two parameters and does a deep equal check (see
+reflect.DeepEqual)
+
+#### func ShouldStartWith
+
+```go
+func ShouldStartWith(actual interface{}, expected ...interface{}) string
+```
+ShouldStartWith receives exactly 2 string parameters and ensures that the first
+starts with the second.
+
+#### func So
+
+```go
+func So(actual interface{}, assert assertion, expected ...interface{}) (bool, string)
+```
+So is a convenience function (as opposed to an inconvenience function?) for
+running assertions on arbitrary arguments in any context, be it for testing or
+even application logging. It allows you to perform assertion-like behavior (and
+get nicely formatted messages detailing discrepancies) but without the program
+blowing up or panicking. All that is required is to import this package and call
+`So` with one of the assertions exported by this package as the second
+parameter. The first return parameter is a boolean indicating if the assertion
+was true. The second return parameter is the well-formatted message showing why
+an assertion was incorrect, or blank if the assertion was correct.
+
+Example:
+
+ if ok, message := So(x, ShouldBeGreaterThan, y); !ok {
+ log.Println(message)
+ }
+
+For an alternative implementation of So (that provides more flexible return
+options) see the `So` function in the package at
+github.com/smartystreets/assertions/assert.
+
+#### type Assertion
+
+```go
+type Assertion struct {
+}
+```
+
+
+#### func New
+
+```go
+func New(t testingT) *Assertion
+```
+New swallows the *testing.T struct and prints failed assertions using t.Error.
+Example: assertions.New(t).So(1, should.Equal, 1)
+
+#### func (*Assertion) Failed
+
+```go
+func (this *Assertion) Failed() bool
+```
+Failed reports whether any calls to So (on this Assertion instance) have failed.
+
+#### func (*Assertion) So
+
+```go
+func (this *Assertion) So(actual interface{}, assert assertion, expected ...interface{}) bool
+```
+So calls the standalone So function and additionally, calls t.Error in failure
+scenarios.
+
+#### type FailureView
+
+```go
+type FailureView struct {
+ Message string `json:"Message"`
+ Expected string `json:"Expected"`
+ Actual string `json:"Actual"`
+}
+```
+
+This struct is also declared in
+github.com/smartystreets/goconvey/convey/reporting. The json struct tags should
+be equal in both declarations.
+
+#### type Serializer
+
+```go
+type Serializer interface {
+ // contains filtered or unexported methods
+}
+```
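The README above documents two entry points: the standalone `So` function and the `Assertion` wrapper returned by `New`. Below is a minimal sketch of both, using only names listed in the README (`So`, `New`, `ShouldEqual`, `ShouldBeGreaterThan`, `Failed`); the package name, helper names, and sample values are illustrative only.

```go
package example

import (
	"log"
	"testing"

	"github.com/smartystreets/assertions"
)

// Standalone usage: So returns (ok, message) and never panics,
// so it can be used outside of tests, e.g. for logging.
func checkThreshold(x, y int) {
	if ok, message := assertions.So(x, assertions.ShouldBeGreaterThan, y); !ok {
		log.Println(message)
	}
}

// Test usage: New(t) wraps *testing.T and reports failures via t.Error.
func TestAssertionWrapper(t *testing.T) {
	assert := assertions.New(t)
	assert.So(1+1, assertions.ShouldEqual, 2)
	if assert.Failed() {
		t.Log("at least one assertion above failed")
	}
}
```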
diff --git a/backend/vendor/github.com/smartystreets/assertions/collections.go b/backend/vendor/github.com/smartystreets/assertions/collections.go
new file mode 100644
index 00000000..b534d4ba
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/collections.go
@@ -0,0 +1,244 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// ShouldContain receives exactly two parameters. The first is a slice and the
+// second is a proposed member. Membership is determined using ShouldEqual.
+func ShouldContain(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ if matchError := oglematchers.Contains(expected[0]).Matches(actual); matchError != nil {
+ typeName := reflect.TypeOf(actual)
+
+ if fmt.Sprintf("%v", matchError) == "which is not a slice or array" {
+ return fmt.Sprintf(shouldHaveBeenAValidCollection, typeName)
+ }
+ return fmt.Sprintf(shouldHaveContained, typeName, expected[0])
+ }
+ return success
+}
+
+// ShouldNotContain receives exactly two parameters. The first is a slice and the
+// second is a proposed member. Membership is determined using ShouldEqual.
+func ShouldNotContain(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ typeName := reflect.TypeOf(actual)
+
+ if matchError := oglematchers.Contains(expected[0]).Matches(actual); matchError != nil {
+ if fmt.Sprintf("%v", matchError) == "which is not a slice or array" {
+ return fmt.Sprintf(shouldHaveBeenAValidCollection, typeName)
+ }
+ return success
+ }
+ return fmt.Sprintf(shouldNotHaveContained, typeName, expected[0])
+}
+
+// ShouldContainKey receives exactly two parameters. The first is a map and the
+// second is a proposed key. Keys are compared with a simple '=='.
+func ShouldContainKey(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ keys, isMap := mapKeys(actual)
+ if !isMap {
+ return fmt.Sprintf(shouldHaveBeenAValidMap, reflect.TypeOf(actual))
+ }
+
+ if !keyFound(keys, expected[0]) {
+ return fmt.Sprintf(shouldHaveContainedKey, reflect.TypeOf(actual), expected)
+ }
+
+ return ""
+}
+
+// ShouldNotContainKey receives exactly two parameters. The first is a map and the
+// second is a proposed absent key. Keys are compared with a simple '=='.
+func ShouldNotContainKey(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ keys, isMap := mapKeys(actual)
+ if !isMap {
+ return fmt.Sprintf(shouldHaveBeenAValidMap, reflect.TypeOf(actual))
+ }
+
+ if keyFound(keys, expected[0]) {
+ return fmt.Sprintf(shouldNotHaveContainedKey, reflect.TypeOf(actual), expected)
+ }
+
+ return ""
+}
+
+func mapKeys(m interface{}) ([]reflect.Value, bool) {
+ value := reflect.ValueOf(m)
+ if value.Kind() != reflect.Map {
+ return nil, false
+ }
+ return value.MapKeys(), true
+}
+func keyFound(keys []reflect.Value, expectedKey interface{}) bool {
+ found := false
+ for _, key := range keys {
+ if key.Interface() == expectedKey {
+ found = true
+ }
+ }
+ return found
+}
+
+// ShouldBeIn receives at least 2 parameters. The first is a proposed member of the collection
+// that is passed in either as the second parameter, or of the collection that is comprised
+// of all the remaining parameters. This assertion ensures that the proposed member is in
+// the collection (using ShouldEqual).
+func ShouldBeIn(actual interface{}, expected ...interface{}) string {
+ if fail := atLeast(1, expected); fail != success {
+ return fail
+ }
+
+ if len(expected) == 1 {
+ return shouldBeIn(actual, expected[0])
+ }
+ return shouldBeIn(actual, expected)
+}
+func shouldBeIn(actual interface{}, expected interface{}) string {
+ if matchError := oglematchers.Contains(actual).Matches(expected); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenIn, actual, reflect.TypeOf(expected))
+ }
+ return success
+}
+
+// ShouldNotBeIn receives at least 2 parameters. The first is a proposed member of the collection
+// that is passed in either as the second parameter, or of the collection that is comprised
+// of all the remaining parameters. This assertion ensures that the proposed member is NOT in
+// the collection (using ShouldEqual).
+func ShouldNotBeIn(actual interface{}, expected ...interface{}) string {
+ if fail := atLeast(1, expected); fail != success {
+ return fail
+ }
+
+ if len(expected) == 1 {
+ return shouldNotBeIn(actual, expected[0])
+ }
+ return shouldNotBeIn(actual, expected)
+}
+func shouldNotBeIn(actual interface{}, expected interface{}) string {
+ if matchError := oglematchers.Contains(actual).Matches(expected); matchError == nil {
+ return fmt.Sprintf(shouldNotHaveBeenIn, actual, reflect.TypeOf(expected))
+ }
+ return success
+}
+
+// ShouldBeEmpty receives a single parameter (actual) and determines whether or not
+// calling len(actual) would return `0`. It obeys the rules specified by the len
+// function for determining length: http://golang.org/pkg/builtin/#len
+func ShouldBeEmpty(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ if actual == nil {
+ return success
+ }
+
+ value := reflect.ValueOf(actual)
+ switch value.Kind() {
+ case reflect.Slice:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.Chan:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.Map:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.String:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.Ptr:
+ elem := value.Elem()
+ kind := elem.Kind()
+ if (kind == reflect.Slice || kind == reflect.Array) && elem.Len() == 0 {
+ return success
+ }
+ }
+
+ return fmt.Sprintf(shouldHaveBeenEmpty, actual)
+}
+
+// ShouldNotBeEmpty receives a single parameter (actual) and determines whether or not
+// calling len(actual) would return a value greater than zero. It obeys the rules
+// specified by the `len` function for determining length: http://golang.org/pkg/builtin/#len
+func ShouldNotBeEmpty(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ if empty := ShouldBeEmpty(actual, expected...); empty != success {
+ return success
+ }
+ return fmt.Sprintf(shouldNotHaveBeenEmpty, actual)
+}
+
+// ShouldHaveLength receives 2 parameters. The first is a collection to check
+// the length of, the second being the expected length. It obeys the rules
+// specified by the len function for determining length:
+// http://golang.org/pkg/builtin/#len
+func ShouldHaveLength(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ var expectedLen int64
+ lenValue := reflect.ValueOf(expected[0])
+ switch lenValue.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ expectedLen = lenValue.Int()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ expectedLen = int64(lenValue.Uint())
+ default:
+ return fmt.Sprintf(shouldHaveBeenAValidInteger, reflect.TypeOf(expected[0]))
+ }
+
+ if expectedLen < 0 {
+ return fmt.Sprintf(shouldHaveBeenAValidLength, expected[0])
+ }
+
+ value := reflect.ValueOf(actual)
+ switch value.Kind() {
+ case reflect.Slice,
+ reflect.Chan,
+ reflect.Map,
+ reflect.String:
+ if int64(value.Len()) == expectedLen {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveHadLength, expectedLen, value.Len(), actual)
+ }
+ case reflect.Ptr:
+ elem := value.Elem()
+ kind := elem.Kind()
+ if kind == reflect.Slice || kind == reflect.Array {
+ if int64(elem.Len()) == expectedLen {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveHadLength, expectedLen, elem.Len(), actual)
+ }
+ }
+ }
+ return fmt.Sprintf(shouldHaveBeenAValidCollection, reflect.TypeOf(actual))
+}
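collections.go shows that every collection assertion returns the empty string on success and a formatted message on failure, and that ShouldBeIn accepts either a single collection argument or the candidate members spread across the remaining arguments. A hedged sketch of calling these assertions directly; the sample data is made up.

```go
package example

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func collectionChecks() {
	letters := []string{"a", "b", "c"}
	ages := map[string]int{"alice": 30}

	// Each assertion returns "" on success, or a failure message.
	fmt.Println(assertions.ShouldContain(letters, "b") == "")     // true
	fmt.Println(assertions.ShouldContainKey(ages, "alice") == "") // true
	fmt.Println(assertions.ShouldHaveLength(letters, 3) == "")    // true

	// ShouldBeIn takes either one collection...
	fmt.Println(assertions.ShouldBeIn("a", letters) == "") // true
	// ...or the candidate members as the remaining arguments.
	fmt.Println(assertions.ShouldBeIn("a", "a", "b", "c") == "") // true

	// On failure, the returned string describes the mismatch.
	fmt.Println(assertions.ShouldContain(letters, "z"))
}
```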
diff --git a/backend/vendor/github.com/smartystreets/assertions/doc.go b/backend/vendor/github.com/smartystreets/assertions/doc.go
new file mode 100644
index 00000000..ba30a926
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/doc.go
@@ -0,0 +1,109 @@
+// Package assertions contains the implementations for all assertions which
+// are referenced in goconvey's `convey` package
+// (github.com/smartystreets/goconvey/convey) and gunit (github.com/smartystreets/gunit)
+// for use with the So(...) method.
+// They can also be used in traditional Go test functions and even in
+// applications.
+//
+// https://smartystreets.com
+//
+// Many of the assertions lean heavily on work done by Aaron Jacobs in his excellent oglematchers library.
+// (https://github.com/jacobsa/oglematchers)
+// The ShouldResemble assertion leans heavily on work done by Daniel Jacques in his very helpful go-render library.
+// (https://github.com/luci/go-render)
+package assertions
+
+import (
+ "fmt"
+ "runtime"
+)
+
+// By default we use a no-op serializer. The actual Serializer provides a JSON
+// representation of failure results on selected assertions so the goconvey
+// web UI can display a convenient diff.
+var serializer Serializer = new(noopSerializer)
+
+// GoConveyMode provides control over JSON serialization of failures. When
+// using the assertions in this package from the convey package JSON results
+// are very helpful and can be rendered in a DIFF view. In that case, this function
+// will be called with a true value to enable the JSON serialization. By default,
+// the assertions in this package will not serialize a JSON result, making
+// standalone usage more convenient.
+func GoConveyMode(yes bool) {
+ if yes {
+ serializer = newSerializer()
+ } else {
+ serializer = new(noopSerializer)
+ }
+}
+
+type testingT interface {
+ Error(args ...interface{})
+}
+
+type Assertion struct {
+ t testingT
+ failed bool
+}
+
+// New swallows the *testing.T struct and prints failed assertions using t.Error.
+// Example: assertions.New(t).So(1, should.Equal, 1)
+func New(t testingT) *Assertion {
+ return &Assertion{t: t}
+}
+
+// Failed reports whether any calls to So (on this Assertion instance) have failed.
+func (this *Assertion) Failed() bool {
+ return this.failed
+}
+
+// So calls the standalone So function and additionally, calls t.Error in failure scenarios.
+func (this *Assertion) So(actual interface{}, assert assertion, expected ...interface{}) bool {
+ ok, result := So(actual, assert, expected...)
+ if !ok {
+ this.failed = true
+ _, file, line, _ := runtime.Caller(1)
+ this.t.Error(fmt.Sprintf("\n%s:%d\n%s", file, line, result))
+ }
+ return ok
+}
+
+// So is a convenience function (as opposed to an inconvenience function?)
+// for running assertions on arbitrary arguments in any context, be it for testing or even
+// application logging. It allows you to perform assertion-like behavior (and get nicely
+// formatted messages detailing discrepancies) but without the program blowing up or panicking.
+// All that is required is to import this package and call `So` with one of the assertions
+// exported by this package as the second parameter.
+// The first return parameter is a boolean indicating if the assertion was true. The second
+// return parameter is the well-formatted message showing why an assertion was incorrect, or
+// blank if the assertion was correct.
+//
+// Example:
+//
+// if ok, message := So(x, ShouldBeGreaterThan, y); !ok {
+// log.Println(message)
+// }
+//
+// For an alternative implementation of So (that provides more flexible return options)
+// see the `So` function in the package at github.com/smartystreets/assertions/assert.
+func So(actual interface{}, assert assertion, expected ...interface{}) (bool, string) {
+ if result := so(actual, assert, expected...); len(result) == 0 {
+ return true, result
+ } else {
+ return false, result
+ }
+}
+
+// so is like So, except that it only returns the string message, which is blank if the
+// assertion passed. Used to facilitate testing.
+func so(actual interface{}, assert func(interface{}, ...interface{}) string, expected ...interface{}) string {
+ return assert(actual, expected...)
+}
+
+// assertion is an alias for a function with a signature that the So()
+// function can handle. Any future or custom assertions should conform to this
+// method signature. The return value should be an empty string if the assertion
+// passes and a well-formed failure message if not.
+type assertion func(actual interface{}, expected ...interface{}) string
+
+////////////////////////////////////////////////////////////////////////////
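doc.go above notes that custom assertions should conform to the `assertion` signature: take an actual value plus variadic comparison values, and return "" on success or a failure message otherwise. A hedged sketch of such a custom assertion plugged into `So`; the name `ShouldBeEven` is hypothetical and not part of the library.

```go
package example

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

// ShouldBeEven is a hypothetical custom assertion conforming to the documented
// shape: func(actual interface{}, expected ...interface{}) string.
func ShouldBeEven(actual interface{}, expected ...interface{}) string {
	n, ok := actual.(int)
	if !ok {
		return fmt.Sprintf("expected an int, got %T", actual)
	}
	if n%2 != 0 {
		return fmt.Sprintf("expected %d to be even", n)
	}
	return ""
}

func customAssertionDemo() {
	// Any function with the right signature can be passed to So.
	if ok, message := assertions.So(3, ShouldBeEven); !ok {
		fmt.Println(message) // "expected 3 to be even"
	}
}
```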
diff --git a/backend/vendor/github.com/smartystreets/assertions/equal_method.go b/backend/vendor/github.com/smartystreets/assertions/equal_method.go
new file mode 100644
index 00000000..c4fc38fa
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/equal_method.go
@@ -0,0 +1,75 @@
+package assertions
+
+import "reflect"
+
+type equalityMethodSpecification struct {
+ a interface{}
+ b interface{}
+
+ aType reflect.Type
+ bType reflect.Type
+
+ equalMethod reflect.Value
+}
+
+func newEqualityMethodSpecification(a, b interface{}) *equalityMethodSpecification {
+ return &equalityMethodSpecification{
+ a: a,
+ b: b,
+ }
+}
+
+func (this *equalityMethodSpecification) IsSatisfied() bool {
+ if !this.bothAreSameType() {
+ return false
+ }
+ if !this.typeHasEqualMethod() {
+ return false
+ }
+ if !this.equalMethodReceivesSameTypeForComparison() {
+ return false
+ }
+ if !this.equalMethodReturnsBool() {
+ return false
+ }
+ return true
+}
+
+func (this *equalityMethodSpecification) bothAreSameType() bool {
+ this.aType = reflect.TypeOf(this.a)
+ if this.aType == nil {
+ return false
+ }
+ if this.aType.Kind() == reflect.Ptr {
+ this.aType = this.aType.Elem()
+ }
+ this.bType = reflect.TypeOf(this.b)
+ return this.aType == this.bType
+}
+func (this *equalityMethodSpecification) typeHasEqualMethod() bool {
+ aInstance := reflect.ValueOf(this.a)
+ this.equalMethod = aInstance.MethodByName("Equal")
+ return this.equalMethod != reflect.Value{}
+}
+
+func (this *equalityMethodSpecification) equalMethodReceivesSameTypeForComparison() bool {
+ signature := this.equalMethod.Type()
+ return signature.NumIn() == 1 && signature.In(0) == this.aType
+}
+
+func (this *equalityMethodSpecification) equalMethodReturnsBool() bool {
+ signature := this.equalMethod.Type()
+ return signature.NumOut() == 1 && signature.Out(0) == reflect.TypeOf(true)
+}
+
+func (this *equalityMethodSpecification) AreEqual() bool {
+ a := reflect.ValueOf(this.a)
+ b := reflect.ValueOf(this.b)
+ return areEqual(a, b) && areEqual(b, a)
+}
+func areEqual(receiver reflect.Value, argument reflect.Value) bool {
+ equalMethod := receiver.MethodByName("Equal")
+ argumentList := []reflect.Value{argument}
+ result := equalMethod.Call(argumentList)
+ return result[0].Bool()
+}
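equal_method.go above checks four things before ShouldEqual defers to a type's own Equal method: both values share a type, the type has an Equal method, that method takes exactly one parameter of the same type, and it returns bool. A hedged sketch of a type satisfying that contract; the `Money` type and values are illustrative.

```go
package example

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

// Money satisfies the Equal contract checked by equalityMethodSpecification:
// same receiver and argument type, exactly one parameter, bool result.
type Money struct {
	Cents    int
	Currency string
}

func (m Money) Equal(other Money) bool {
	return m.Cents == other.Cents && m.Currency == other.Currency
}

func equalMethodDemo() {
	a := Money{Cents: 100, Currency: "USD"}
	b := Money{Cents: 100, Currency: "USD"}

	// ShouldEqual calls a.Equal(b) and b.Equal(a) before falling back to
	// oglematchers.Equals, so these two values compare as equal.
	fmt.Println(assertions.ShouldEqual(a, b) == "") // true
}
```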
diff --git a/backend/vendor/github.com/smartystreets/assertions/equality.go b/backend/vendor/github.com/smartystreets/assertions/equality.go
new file mode 100644
index 00000000..37a49f4e
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/equality.go
@@ -0,0 +1,331 @@
+package assertions
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+ "strings"
+
+ "github.com/smartystreets/assertions/internal/go-render/render"
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// ShouldEqual receives exactly two parameters and does an equality check
+// using the following semantics:
+// 1. If the expected and actual values implement an Equal method in the form
+// `func (this T) Equal(that T) bool` then call the method. If true, they are equal.
+// 2. The expected and actual values are judged equal or not by oglematchers.Equals.
+func ShouldEqual(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+ return shouldEqual(actual, expected[0])
+}
+func shouldEqual(actual, expected interface{}) (message string) {
+ defer func() {
+ if r := recover(); r != nil {
+ message = serializer.serialize(expected, actual, composeEqualityMismatchMessage(expected, actual))
+ }
+ }()
+
+ if spec := newEqualityMethodSpecification(expected, actual); spec.IsSatisfied() && spec.AreEqual() {
+ return success
+ } else if matchError := oglematchers.Equals(expected).Matches(actual); matchError == nil {
+ return success
+ }
+
+ return serializer.serialize(expected, actual, composeEqualityMismatchMessage(expected, actual))
+}
+func composeEqualityMismatchMessage(expected, actual interface{}) string {
+ var (
+ renderedExpected = fmt.Sprintf("%v", expected)
+ renderedActual = fmt.Sprintf("%v", actual)
+ )
+
+ if renderedExpected != renderedActual {
+ return fmt.Sprintf(shouldHaveBeenEqual+composePrettyDiff(renderedExpected, renderedActual), expected, actual)
+ } else if reflect.TypeOf(expected) != reflect.TypeOf(actual) {
+ return fmt.Sprintf(shouldHaveBeenEqualTypeMismatch, expected, expected, actual, actual)
+ } else {
+ return fmt.Sprintf(shouldHaveBeenEqualNoResemblance, renderedExpected)
+ }
+}
+
+// ShouldNotEqual receives exactly two parameters and does an inequality check.
+// See ShouldEqual for details on how equality is determined.
+func ShouldNotEqual(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if ShouldEqual(actual, expected[0]) == success {
+ return fmt.Sprintf(shouldNotHaveBeenEqual, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldAlmostEqual makes sure that two parameters are close enough to being equal.
+// The acceptable delta may be specified with a third argument,
+// or a very small default delta will be used.
+func ShouldAlmostEqual(actual interface{}, expected ...interface{}) string {
+ actualFloat, expectedFloat, deltaFloat, err := cleanAlmostEqualInput(actual, expected...)
+
+ if err != "" {
+ return err
+ }
+
+ if math.Abs(actualFloat-expectedFloat) <= deltaFloat {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveBeenAlmostEqual, actualFloat, expectedFloat)
+ }
+}
+
+// ShouldNotAlmostEqual is the inverse of ShouldAlmostEqual
+func ShouldNotAlmostEqual(actual interface{}, expected ...interface{}) string {
+ actualFloat, expectedFloat, deltaFloat, err := cleanAlmostEqualInput(actual, expected...)
+
+ if err != "" {
+ return err
+ }
+
+ if math.Abs(actualFloat-expectedFloat) > deltaFloat {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveNotBeenAlmostEqual, actualFloat, expectedFloat)
+ }
+}
+
+func cleanAlmostEqualInput(actual interface{}, expected ...interface{}) (float64, float64, float64, string) {
+ deltaFloat := 0.0000000001
+
+ if len(expected) == 0 {
+ return 0.0, 0.0, 0.0, "This assertion requires exactly one comparison value and an optional delta (you provided neither)"
+ } else if len(expected) == 2 {
+ delta, err := getFloat(expected[1])
+
+ if err != nil {
+ return 0.0, 0.0, 0.0, "The delta value " + err.Error()
+ }
+
+ deltaFloat = delta
+ } else if len(expected) > 2 {
+ return 0.0, 0.0, 0.0, "This assertion requires exactly one comparison value and an optional delta (you provided more values)"
+ }
+
+ actualFloat, err := getFloat(actual)
+ if err != nil {
+ return 0.0, 0.0, 0.0, "The actual value " + err.Error()
+ }
+
+ expectedFloat, err := getFloat(expected[0])
+ if err != nil {
+ return 0.0, 0.0, 0.0, "The comparison value " + err.Error()
+ }
+
+ return actualFloat, expectedFloat, deltaFloat, ""
+}
+
+// returns the float value of any real number, or error if it is not a numerical type
+func getFloat(num interface{}) (float64, error) {
+ numValue := reflect.ValueOf(num)
+ numKind := numValue.Kind()
+
+ if numKind == reflect.Int ||
+ numKind == reflect.Int8 ||
+ numKind == reflect.Int16 ||
+ numKind == reflect.Int32 ||
+ numKind == reflect.Int64 {
+ return float64(numValue.Int()), nil
+ } else if numKind == reflect.Uint ||
+ numKind == reflect.Uint8 ||
+ numKind == reflect.Uint16 ||
+ numKind == reflect.Uint32 ||
+ numKind == reflect.Uint64 {
+ return float64(numValue.Uint()), nil
+ } else if numKind == reflect.Float32 ||
+ numKind == reflect.Float64 {
+ return numValue.Float(), nil
+ } else {
+ return 0.0, errors.New("must be a numerical type, but was: " + numKind.String())
+ }
+}
+
+// ShouldEqualJSON receives exactly two parameters and does an equality check by marshalling to JSON
+func ShouldEqualJSON(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+
+ expectedString, expectedErr := remarshal(expected[0].(string))
+ if expectedErr != nil {
+ return "Expected value not valid JSON: " + expectedErr.Error()
+ }
+
+ actualString, actualErr := remarshal(actual.(string))
+ if actualErr != nil {
+ return "Actual value not valid JSON: " + actualErr.Error()
+ }
+
+ return ShouldEqual(actualString, expectedString)
+}
+func remarshal(value string) (string, error) {
+ var structured interface{}
+ err := json.Unmarshal([]byte(value), &structured)
+ if err != nil {
+ return "", err
+ }
+ canonical, _ := json.Marshal(structured)
+ return string(canonical), nil
+}
+
+// ShouldResemble receives exactly two parameters and does a deep equal check (see reflect.DeepEqual)
+func ShouldResemble(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+
+ if matchError := oglematchers.DeepEquals(expected[0]).Matches(actual); matchError != nil {
+ renderedExpected, renderedActual := render.Render(expected[0]), render.Render(actual)
+ message := fmt.Sprintf(shouldHaveResembled, renderedExpected, renderedActual) +
+ composePrettyDiff(renderedExpected, renderedActual)
+ return serializer.serializeDetailed(expected[0], actual, message)
+ }
+
+ return success
+}
+
+// ShouldNotResemble receives exactly two parameters and does an inverse deep equal check (see reflect.DeepEqual)
+func ShouldNotResemble(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ } else if ShouldResemble(actual, expected[0]) == success {
+ return fmt.Sprintf(shouldNotHaveResembled, render.Render(actual), render.Render(expected[0]))
+ }
+ return success
+}
+
+// ShouldPointTo receives exactly two parameters and checks to see that they point to the same address.
+func ShouldPointTo(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+ return shouldPointTo(actual, expected[0])
+
+}
+func shouldPointTo(actual, expected interface{}) string {
+ actualValue := reflect.ValueOf(actual)
+ expectedValue := reflect.ValueOf(expected)
+
+ if ShouldNotBeNil(actual) != success {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "first", "nil")
+ } else if ShouldNotBeNil(expected) != success {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "second", "nil")
+ } else if actualValue.Kind() != reflect.Ptr {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "first", "not")
+ } else if expectedValue.Kind() != reflect.Ptr {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "second", "not")
+ } else if ShouldEqual(actualValue.Pointer(), expectedValue.Pointer()) != success {
+ actualAddress := reflect.ValueOf(actual).Pointer()
+ expectedAddress := reflect.ValueOf(expected).Pointer()
+ return serializer.serialize(expectedAddress, actualAddress, fmt.Sprintf(shouldHavePointedTo,
+ actual, actualAddress,
+ expected, expectedAddress))
+ }
+ return success
+}
+
+// ShouldNotPointTo receives exactly two parameters and checks to see that they point to different addresses.
+func ShouldNotPointTo(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+ compare := ShouldPointTo(actual, expected[0])
+ if strings.HasPrefix(compare, shouldBePointers) {
+ return compare
+ } else if compare == success {
+ return fmt.Sprintf(shouldNotHavePointedTo, actual, expected[0], reflect.ValueOf(actual).Pointer())
+ }
+ return success
+}
+
+// ShouldBeNil receives a single parameter and ensures that it is nil.
+func ShouldBeNil(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if actual == nil {
+ return success
+ } else if interfaceHasNilValue(actual) {
+ return success
+ }
+ return fmt.Sprintf(shouldHaveBeenNil, actual)
+}
+func interfaceHasNilValue(actual interface{}) bool {
+ value := reflect.ValueOf(actual)
+ kind := value.Kind()
+ nilable := kind == reflect.Slice ||
+ kind == reflect.Chan ||
+ kind == reflect.Func ||
+ kind == reflect.Ptr ||
+ kind == reflect.Map
+
+ // Careful: reflect.Value.IsNil() will panic unless it's an interface, chan, map, func, slice, or ptr
+ // Reference: http://golang.org/pkg/reflect/#Value.IsNil
+ return nilable && value.IsNil()
+}
+
+// ShouldNotBeNil receives a single parameter and ensures that it is not nil.
+func ShouldNotBeNil(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if ShouldBeNil(actual) == success {
+ return fmt.Sprintf(shouldNotHaveBeenNil, actual)
+ }
+ return success
+}
+
+// ShouldBeTrue receives a single parameter and ensures that it is true.
+func ShouldBeTrue(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if actual != true {
+ return fmt.Sprintf(shouldHaveBeenTrue, actual)
+ }
+ return success
+}
+
+// ShouldBeFalse receives a single parameter and ensures that it is false.
+func ShouldBeFalse(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if actual != false {
+ return fmt.Sprintf(shouldHaveBeenFalse, actual)
+ }
+ return success
+}
+
+// ShouldBeZeroValue receives a single parameter and ensures that it is
+// the Go equivalent of the default value, or "zero" value.
+func ShouldBeZeroValue(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ zeroVal := reflect.Zero(reflect.TypeOf(actual)).Interface()
+ if !reflect.DeepEqual(zeroVal, actual) {
+ return serializer.serialize(zeroVal, actual, fmt.Sprintf(shouldHaveBeenZeroValue, actual))
+ }
+ return success
+}
+
+// ShouldNotBeZeroValue receives a single parameter and ensures that it is NOT
+// the Go equivalent of the default value, or "zero" value.
+func ShouldNotBeZeroValue(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ zeroVal := reflect.Zero(reflect.TypeOf(actual)).Interface()
+ if reflect.DeepEqual(zeroVal, actual) {
+ return serializer.serialize(zeroVal, actual, fmt.Sprintf(shouldNotHaveBeenZeroValue, actual))
+ }
+ return success
+}
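equality.go above shows that ShouldAlmostEqual falls back to a default delta of 1e-10 when no third argument is given, and that ShouldEqualJSON compares two JSON strings after unmarshalling and re-marshalling them into a canonical form. A hedged sketch; the values are illustrative.

```go
package example

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func equalityDemo() {
	// The default delta (1e-10) is used when no third argument is given.
	fmt.Println(assertions.ShouldAlmostEqual(1.0, 1.0000000000001) == "") // true

	// An explicit delta can be supplied as the second comparison value.
	fmt.Println(assertions.ShouldAlmostEqual(1.0, 1.05, 0.1) == "") // true

	// ShouldEqualJSON ignores key order and insignificant whitespace,
	// because both sides are remarshalled before comparison.
	a := `{"name": "crawlab", "port": 8000}`
	b := `{"port":8000,"name":"crawlab"}`
	fmt.Println(assertions.ShouldEqualJSON(a, b) == "") // true
}
```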
diff --git a/backend/vendor/github.com/smartystreets/assertions/equality_diff.go b/backend/vendor/github.com/smartystreets/assertions/equality_diff.go
new file mode 100644
index 00000000..bd698ff6
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/equality_diff.go
@@ -0,0 +1,37 @@
+package assertions
+
+import (
+ "fmt"
+
+ "github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch"
+)
+
+func composePrettyDiff(expected, actual string) string {
+ diff := diffmatchpatch.New()
+ diffs := diff.DiffMain(expected, actual, false)
+ if prettyDiffIsLikelyToBeHelpful(diffs) {
+ return fmt.Sprintf("\nDiff: '%s'", diff.DiffPrettyText(diffs))
+ }
+ return ""
+}
+
+// prettyDiffIsLikelyToBeHelpful returns true if the diff listing contains
+// more 'equal' segments than 'deleted'/'inserted' segments.
+func prettyDiffIsLikelyToBeHelpful(diffs []diffmatchpatch.Diff) bool {
+ equal, deleted, inserted := measureDiffTypeLengths(diffs)
+ return equal > deleted && equal > inserted
+}
+
+func measureDiffTypeLengths(diffs []diffmatchpatch.Diff) (equal, deleted, inserted int) {
+ for _, segment := range diffs {
+ switch segment.Type {
+ case diffmatchpatch.DiffEqual:
+ equal += len(segment.Text)
+ case diffmatchpatch.DiffDelete:
+ deleted += len(segment.Text)
+ case diffmatchpatch.DiffInsert:
+ inserted += len(segment.Text)
+ }
+ }
+ return equal, deleted, inserted
+}
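equality_diff.go above builds its inline diff with the vendored go-diff package: DiffMain computes the edit segments and DiffPrettyText renders them, and the diff is appended only when the equal portions outweigh the insertions and deletions. The vendored copy sits under an internal/ path that outside code cannot import, so this sketch uses the upstream module the file header points to (github.com/sergi/go-diff), which exposes the same calls; the input strings are made up.

```go
package example

import (
	"fmt"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func diffDemo() {
	dmp := diffmatchpatch.New()

	// DiffMain returns a sequence of equal/insert/delete segments.
	diffs := dmp.DiffMain("crawlab master", "crawlab worker", false)
	for _, segment := range diffs {
		fmt.Printf("%v %q\n", segment.Type, segment.Text)
	}

	// DiffPrettyText renders the same segments as a single annotated string,
	// which is what composePrettyDiff appends to failure messages.
	fmt.Println(dmp.DiffPrettyText(diffs))
}
```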
diff --git a/backend/vendor/github.com/smartystreets/assertions/filter.go b/backend/vendor/github.com/smartystreets/assertions/filter.go
new file mode 100644
index 00000000..cbf75667
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/filter.go
@@ -0,0 +1,31 @@
+package assertions
+
+import "fmt"
+
+const (
+ success = ""
+ needExactValues = "This assertion requires exactly %d comparison values (you provided %d)."
+ needNonEmptyCollection = "This assertion requires at least 1 comparison value (you provided 0)."
+ needFewerValues = "This assertion allows %d or fewer comparison values (you provided %d)."
+)
+
+func need(needed int, expected []interface{}) string {
+ if len(expected) != needed {
+ return fmt.Sprintf(needExactValues, needed, len(expected))
+ }
+ return success
+}
+
+func atLeast(minimum int, expected []interface{}) string {
+ if len(expected) < minimum {
+ return needNonEmptyCollection
+ }
+ return success
+}
+
+func atMost(max int, expected []interface{}) string {
+ if len(expected) > max {
+ return fmt.Sprintf(needFewerValues, max, len(expected))
+ }
+ return success
+}
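filter.go above defines the unexported need/atLeast/atMost guards that validate how many comparison values an assertion received. Their effect is visible from outside the package: passing the wrong number of values makes an assertion return the canned message instead of panicking. A hedged sketch; the messages shown are the literal format strings from this file.

```go
package example

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func argumentCountDemo() {
	// ShouldEqual needs exactly one comparison value; the need() guard
	// turns a wrong count into a message rather than a panic.
	fmt.Println(assertions.ShouldEqual(1))
	// "This assertion requires exactly 1 comparison values (you provided 0)."

	fmt.Println(assertions.ShouldEqual(1, 1, 1))
	// "This assertion requires exactly 1 comparison values (you provided 2)."
}
```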
diff --git a/backend/vendor/github.com/smartystreets/assertions/go.mod b/backend/vendor/github.com/smartystreets/assertions/go.mod
new file mode 100644
index 00000000..c0daaa3d
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/go.mod
@@ -0,0 +1,3 @@
+module github.com/smartystreets/assertions
+
+go 1.12
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/AUTHORS b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/AUTHORS
new file mode 100644
index 00000000..2d7bb2bf
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/AUTHORS
@@ -0,0 +1,25 @@
+# This is the official list of go-diff authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+
+# Names should be added to this file as
+# Name or Organization
+# The email address is not required for organizations.
+
+# Please keep the list sorted.
+
+Danny Yoo
+James Kolb
+Jonathan Amsterdam
+Markus Zimmermann
+Matt Kovars
+Örjan Persson
+Osman Masood
+Robert Carlsen
+Rory Flynn
+Sergi Mansilla
+Shatrugna Sadhu
+Shawn Smith
+Stas Maksimov
+Tor Arvid Lund
+Zac Bergquist
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/CONTRIBUTORS b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/CONTRIBUTORS
new file mode 100644
index 00000000..369e3d55
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/CONTRIBUTORS
@@ -0,0 +1,32 @@
+# This is the official list of people who can contribute
+# (and typically have contributed) code to the go-diff
+# repository.
+#
+# The AUTHORS file lists the copyright holders; this file
+# lists people. For example, ACME Inc. employees would be listed here
+# but not in AUTHORS, because ACME Inc. would hold the copyright.
+#
+# When adding J Random Contributor's name to this file,
+# either J's name or J's organization's name should be
+# added to the AUTHORS file.
+#
+# Names should be added to this file like so:
+# Name
+#
+# Please keep the list sorted.
+
+Danny Yoo
+James Kolb
+Jonathan Amsterdam
+Markus Zimmermann
+Matt Kovars
+Örjan Persson
+Osman Masood
+Robert Carlsen
+Rory Flynn
+Sergi Mansilla
+Shatrugna Sadhu
+Shawn Smith
+Stas Maksimov
+Tor Arvid Lund
+Zac Bergquist
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/LICENSE b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/LICENSE
new file mode 100644
index 00000000..937942c2
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2012-2016 The go-diff Authors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/diff.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/diff.go
new file mode 100644
index 00000000..cb25b437
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/diff.go
@@ -0,0 +1,1345 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "html"
+ "math"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+)
+
+// Operation defines the operation of a diff item.
+type Operation int8
+
+//go:generate stringer -type=Operation -trimprefix=Diff
+
+const (
+ // DiffDelete item represents a delete diff.
+ DiffDelete Operation = -1
+ // DiffInsert item represents an insert diff.
+ DiffInsert Operation = 1
+ // DiffEqual item represents an equal diff.
+ DiffEqual Operation = 0
+)
+
+// Diff represents one diff operation
+type Diff struct {
+ Type Operation
+ Text string
+}
+
+// splice removes amount elements from slice at index index, replacing them with elements.
+func splice(slice []Diff, index int, amount int, elements ...Diff) []Diff {
+ if len(elements) == amount {
+ // Easy case: overwrite the relevant items.
+ copy(slice[index:], elements)
+ return slice
+ }
+ if len(elements) < amount {
+ // Fewer new items than old.
+ // Copy in the new items.
+ copy(slice[index:], elements)
+ // Shift the remaining items left.
+ copy(slice[index+len(elements):], slice[index+amount:])
+ // Calculate the new end of the slice.
+ end := len(slice) - amount + len(elements)
+ // Zero stranded elements at end so that they can be garbage collected.
+ tail := slice[end:]
+ for i := range tail {
+ tail[i] = Diff{}
+ }
+ return slice[:end]
+ }
+ // More new items than old.
+ // Make room in slice for new elements.
+ // There's probably an even more efficient way to do this,
+ // but this is simple and clear.
+ need := len(slice) - amount + len(elements)
+ for len(slice) < need {
+ slice = append(slice, Diff{})
+ }
+ // Shift slice elements right to make room for new elements.
+ copy(slice[index+len(elements):], slice[index+amount:])
+ // Copy in new elements.
+ copy(slice[index:], elements)
+ return slice
+}
+
+// DiffMain finds the differences between two texts.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+func (dmp *DiffMatchPatch) DiffMain(text1, text2 string, checklines bool) []Diff {
+ return dmp.DiffMainRunes([]rune(text1), []rune(text2), checklines)
+}
+
+// DiffMainRunes finds the differences between two rune sequences.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+func (dmp *DiffMatchPatch) DiffMainRunes(text1, text2 []rune, checklines bool) []Diff {
+ var deadline time.Time
+ if dmp.DiffTimeout > 0 {
+ deadline = time.Now().Add(dmp.DiffTimeout)
+ }
+ return dmp.diffMainRunes(text1, text2, checklines, deadline)
+}
+
+func (dmp *DiffMatchPatch) diffMainRunes(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
+ if runesEqual(text1, text2) {
+ var diffs []Diff
+ if len(text1) > 0 {
+ diffs = append(diffs, Diff{DiffEqual, string(text1)})
+ }
+ return diffs
+ }
+ // Trim off common prefix (speedup).
+ commonlength := commonPrefixLength(text1, text2)
+ commonprefix := text1[:commonlength]
+ text1 = text1[commonlength:]
+ text2 = text2[commonlength:]
+
+ // Trim off common suffix (speedup).
+ commonlength = commonSuffixLength(text1, text2)
+ commonsuffix := text1[len(text1)-commonlength:]
+ text1 = text1[:len(text1)-commonlength]
+ text2 = text2[:len(text2)-commonlength]
+
+ // Compute the diff on the middle block.
+ diffs := dmp.diffCompute(text1, text2, checklines, deadline)
+
+ // Restore the prefix and suffix.
+ if len(commonprefix) != 0 {
+ diffs = append([]Diff{Diff{DiffEqual, string(commonprefix)}}, diffs...)
+ }
+ if len(commonsuffix) != 0 {
+ diffs = append(diffs, Diff{DiffEqual, string(commonsuffix)})
+ }
+
+ return dmp.DiffCleanupMerge(diffs)
+}
+
+// diffCompute finds the differences between two rune slices. Assumes that the texts do not have any common prefix or suffix.
+func (dmp *DiffMatchPatch) diffCompute(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
+ diffs := []Diff{}
+ if len(text1) == 0 {
+ // Just add some text (speedup).
+ return append(diffs, Diff{DiffInsert, string(text2)})
+ } else if len(text2) == 0 {
+ // Just delete some text (speedup).
+ return append(diffs, Diff{DiffDelete, string(text1)})
+ }
+
+ var longtext, shorttext []rune
+ if len(text1) > len(text2) {
+ longtext = text1
+ shorttext = text2
+ } else {
+ longtext = text2
+ shorttext = text1
+ }
+
+ if i := runesIndex(longtext, shorttext); i != -1 {
+ op := DiffInsert
+ // Swap insertions for deletions if diff is reversed.
+ if len(text1) > len(text2) {
+ op = DiffDelete
+ }
+ // Shorter text is inside the longer text (speedup).
+ return []Diff{
+ Diff{op, string(longtext[:i])},
+ Diff{DiffEqual, string(shorttext)},
+ Diff{op, string(longtext[i+len(shorttext):])},
+ }
+ } else if len(shorttext) == 1 {
+ // Single character string.
+ // After the previous speedup, the character can't be an equality.
+ return []Diff{
+ Diff{DiffDelete, string(text1)},
+ Diff{DiffInsert, string(text2)},
+ }
+ // Check to see if the problem can be split in two.
+ } else if hm := dmp.diffHalfMatch(text1, text2); hm != nil {
+ // A half-match was found, sort out the return data.
+ text1A := hm[0]
+ text1B := hm[1]
+ text2A := hm[2]
+ text2B := hm[3]
+ midCommon := hm[4]
+ // Send both pairs off for separate processing.
+ diffsA := dmp.diffMainRunes(text1A, text2A, checklines, deadline)
+ diffsB := dmp.diffMainRunes(text1B, text2B, checklines, deadline)
+ // Merge the results.
+ diffs := diffsA
+ diffs = append(diffs, Diff{DiffEqual, string(midCommon)})
+ diffs = append(diffs, diffsB...)
+ return diffs
+ } else if checklines && len(text1) > 100 && len(text2) > 100 {
+ return dmp.diffLineMode(text1, text2, deadline)
+ }
+ return dmp.diffBisect(text1, text2, deadline)
+}
+
+// diffLineMode does a quick line-level diff on both []runes, then rediffs the parts for greater accuracy. This speedup can produce non-minimal diffs.
+func (dmp *DiffMatchPatch) diffLineMode(text1, text2 []rune, deadline time.Time) []Diff {
+ // Scan the text on a line-by-line basis first.
+ text1, text2, linearray := dmp.diffLinesToRunes(text1, text2)
+
+ diffs := dmp.diffMainRunes(text1, text2, false, deadline)
+
+ // Convert the diff back to original text.
+ diffs = dmp.DiffCharsToLines(diffs, linearray)
+ // Eliminate freak matches (e.g. blank lines)
+ diffs = dmp.DiffCleanupSemantic(diffs)
+
+ // Rediff any replacement blocks, this time character-by-character.
+ // Add a dummy entry at the end.
+ diffs = append(diffs, Diff{DiffEqual, ""})
+
+ pointer := 0
+ countDelete := 0
+ countInsert := 0
+
+ // NOTE: Rune slices are slower than using strings in this case.
+ textDelete := ""
+ textInsert := ""
+
+ for pointer < len(diffs) {
+ switch diffs[pointer].Type {
+ case DiffInsert:
+ countInsert++
+ textInsert += diffs[pointer].Text
+ case DiffDelete:
+ countDelete++
+ textDelete += diffs[pointer].Text
+ case DiffEqual:
+ // Upon reaching an equality, check for prior redundancies.
+ if countDelete >= 1 && countInsert >= 1 {
+ // Delete the offending records and add the merged ones.
+ diffs = splice(diffs, pointer-countDelete-countInsert,
+ countDelete+countInsert)
+
+ pointer = pointer - countDelete - countInsert
+ a := dmp.diffMainRunes([]rune(textDelete), []rune(textInsert), false, deadline)
+ for j := len(a) - 1; j >= 0; j-- {
+ diffs = splice(diffs, pointer, 0, a[j])
+ }
+ pointer = pointer + len(a)
+ }
+
+ countInsert = 0
+ countDelete = 0
+ textDelete = ""
+ textInsert = ""
+ }
+ pointer++
+ }
+
+ return diffs[:len(diffs)-1] // Remove the dummy entry at the end.
+}
+
+// DiffBisect finds the 'middle snake' of a diff, splits the problem in two and returns the recursively constructed diff.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+// See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+func (dmp *DiffMatchPatch) DiffBisect(text1, text2 string, deadline time.Time) []Diff {
+ // Unused in this code, but retained for interface compatibility.
+ return dmp.diffBisect([]rune(text1), []rune(text2), deadline)
+}
+
+// diffBisect finds the 'middle snake' of a diff, splits the problem in two and returns the recursively constructed diff.
+// See Myers's 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+func (dmp *DiffMatchPatch) diffBisect(runes1, runes2 []rune, deadline time.Time) []Diff {
+ // Cache the text lengths to prevent multiple calls.
+ runes1Len, runes2Len := len(runes1), len(runes2)
+
+ maxD := (runes1Len + runes2Len + 1) / 2
+ vOffset := maxD
+ vLength := 2 * maxD
+
+ v1 := make([]int, vLength)
+ v2 := make([]int, vLength)
+ for i := range v1 {
+ v1[i] = -1
+ v2[i] = -1
+ }
+ v1[vOffset+1] = 0
+ v2[vOffset+1] = 0
+
+ delta := runes1Len - runes2Len
+ // If the total number of characters is odd, then the front path will collide with the reverse path.
+ front := (delta%2 != 0)
+ // Offsets for start and end of k loop. Prevents mapping of space beyond the grid.
+ k1start := 0
+ k1end := 0
+ k2start := 0
+ k2end := 0
+ for d := 0; d < maxD; d++ {
+ // Bail out if deadline is reached.
+ if !deadline.IsZero() && d%16 == 0 && time.Now().After(deadline) {
+ break
+ }
+
+ // Walk the front path one step.
+ for k1 := -d + k1start; k1 <= d-k1end; k1 += 2 {
+ k1Offset := vOffset + k1
+ var x1 int
+
+ if k1 == -d || (k1 != d && v1[k1Offset-1] < v1[k1Offset+1]) {
+ x1 = v1[k1Offset+1]
+ } else {
+ x1 = v1[k1Offset-1] + 1
+ }
+
+ y1 := x1 - k1
+ for x1 < runes1Len && y1 < runes2Len {
+ if runes1[x1] != runes2[y1] {
+ break
+ }
+ x1++
+ y1++
+ }
+ v1[k1Offset] = x1
+ if x1 > runes1Len {
+ // Ran off the right of the graph.
+ k1end += 2
+ } else if y1 > runes2Len {
+ // Ran off the bottom of the graph.
+ k1start += 2
+ } else if front {
+ k2Offset := vOffset + delta - k1
+ if k2Offset >= 0 && k2Offset < vLength && v2[k2Offset] != -1 {
+ // Mirror x2 onto top-left coordinate system.
+ x2 := runes1Len - v2[k2Offset]
+ if x1 >= x2 {
+ // Overlap detected.
+ return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
+ }
+ }
+ }
+ }
+ // Walk the reverse path one step.
+ for k2 := -d + k2start; k2 <= d-k2end; k2 += 2 {
+ k2Offset := vOffset + k2
+ var x2 int
+ if k2 == -d || (k2 != d && v2[k2Offset-1] < v2[k2Offset+1]) {
+ x2 = v2[k2Offset+1]
+ } else {
+ x2 = v2[k2Offset-1] + 1
+ }
+ var y2 = x2 - k2
+ for x2 < runes1Len && y2 < runes2Len {
+ if runes1[runes1Len-x2-1] != runes2[runes2Len-y2-1] {
+ break
+ }
+ x2++
+ y2++
+ }
+ v2[k2Offset] = x2
+ if x2 > runes1Len {
+ // Ran off the left of the graph.
+ k2end += 2
+ } else if y2 > runes2Len {
+ // Ran off the top of the graph.
+ k2start += 2
+ } else if !front {
+ k1Offset := vOffset + delta - k2
+ if k1Offset >= 0 && k1Offset < vLength && v1[k1Offset] != -1 {
+ x1 := v1[k1Offset]
+ y1 := vOffset + x1 - k1Offset
+ // Mirror x2 onto top-left coordinate system.
+ x2 = runes1Len - x2
+ if x1 >= x2 {
+ // Overlap detected.
+ return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
+ }
+ }
+ }
+ }
+ }
+ // Diff took too long and hit the deadline or number of diffs equals number of characters, no commonality at all.
+ return []Diff{
+ Diff{DiffDelete, string(runes1)},
+ Diff{DiffInsert, string(runes2)},
+ }
+}
+
+func (dmp *DiffMatchPatch) diffBisectSplit(runes1, runes2 []rune, x, y int,
+ deadline time.Time) []Diff {
+ runes1a := runes1[:x]
+ runes2a := runes2[:y]
+ runes1b := runes1[x:]
+ runes2b := runes2[y:]
+
+ // Compute both diffs serially.
+ diffs := dmp.diffMainRunes(runes1a, runes2a, false, deadline)
+ diffsb := dmp.diffMainRunes(runes1b, runes2b, false, deadline)
+
+ return append(diffs, diffsb...)
+}
+
+// DiffLinesToChars splits two texts into a list of strings, and reduces the texts to a string of hashes where each Unicode character represents one line.
+// It's slightly faster to call DiffLinesToRunes first, followed by DiffMainRunes.
+func (dmp *DiffMatchPatch) DiffLinesToChars(text1, text2 string) (string, string, []string) {
+ chars1, chars2, lineArray := dmp.DiffLinesToRunes(text1, text2)
+ return string(chars1), string(chars2), lineArray
+}
+
+// DiffLinesToRunes splits two texts into a list of runes. Each rune represents one line.
+func (dmp *DiffMatchPatch) DiffLinesToRunes(text1, text2 string) ([]rune, []rune, []string) {
+ // '\x00' is a valid character, but various debuggers don't like it. So we'll insert a junk entry to avoid generating a null character.
+ lineArray := []string{""} // e.g. lineArray[4] == 'Hello\n'
+ lineHash := map[string]int{} // e.g. lineHash['Hello\n'] == 4
+
+ chars1 := dmp.diffLinesToRunesMunge(text1, &lineArray, lineHash)
+ chars2 := dmp.diffLinesToRunesMunge(text2, &lineArray, lineHash)
+
+ return chars1, chars2, lineArray
+}
+
+func (dmp *DiffMatchPatch) diffLinesToRunes(text1, text2 []rune) ([]rune, []rune, []string) {
+ return dmp.DiffLinesToRunes(string(text1), string(text2))
+}
+
+// diffLinesToRunesMunge splits a text into an array of strings, and reduces the texts to a []rune where each Unicode character represents one line.
+// We use strings instead of []runes as input mainly because you can't use []rune as a map key.
+func (dmp *DiffMatchPatch) diffLinesToRunesMunge(text string, lineArray *[]string, lineHash map[string]int) []rune {
+ // Walk the text, pulling out a substring for each line. text.split('\n') would temporarily double our memory footprint. Modifying text would create many large strings to garbage collect.
+ lineStart := 0
+ lineEnd := -1
+ runes := []rune{}
+
+ for lineEnd < len(text)-1 {
+ lineEnd = indexOf(text, "\n", lineStart)
+
+ if lineEnd == -1 {
+ lineEnd = len(text) - 1
+ }
+
+ line := text[lineStart : lineEnd+1]
+ lineStart = lineEnd + 1
+ lineValue, ok := lineHash[line]
+
+ if ok {
+ runes = append(runes, rune(lineValue))
+ } else {
+ *lineArray = append(*lineArray, line)
+ lineHash[line] = len(*lineArray) - 1
+ runes = append(runes, rune(len(*lineArray)-1))
+ }
+ }
+
+ return runes
+}
+
+// DiffCharsToLines rehydrates the text in a diff from a string of line hashes to real lines of text.
+func (dmp *DiffMatchPatch) DiffCharsToLines(diffs []Diff, lineArray []string) []Diff {
+ hydrated := make([]Diff, 0, len(diffs))
+ for _, aDiff := range diffs {
+ chars := aDiff.Text
+ text := make([]string, len(chars))
+
+ for i, r := range chars {
+ text[i] = lineArray[r]
+ }
+
+ aDiff.Text = strings.Join(text, "")
+ hydrated = append(hydrated, aDiff)
+ }
+ return hydrated
+}
+
+// DiffCommonPrefix determines the common prefix length of two strings.
+func (dmp *DiffMatchPatch) DiffCommonPrefix(text1, text2 string) int {
+ // Unused in this code, but retained for interface compatibility.
+ return commonPrefixLength([]rune(text1), []rune(text2))
+}
+
+// DiffCommonSuffix determines the common suffix length of two strings.
+func (dmp *DiffMatchPatch) DiffCommonSuffix(text1, text2 string) int {
+ // Unused in this code, but retained for interface compatibility.
+ return commonSuffixLength([]rune(text1), []rune(text2))
+}
+
+// commonPrefixLength returns the length of the common prefix of two rune slices.
+func commonPrefixLength(text1, text2 []rune) int {
+ // Linear search. See comment in commonSuffixLength.
+ n := 0
+ for ; n < len(text1) && n < len(text2); n++ {
+ if text1[n] != text2[n] {
+ return n
+ }
+ }
+ return n
+}
+
+// commonSuffixLength returns the length of the common suffix of two rune slices.
+func commonSuffixLength(text1, text2 []rune) int {
+ // Use linear search rather than the binary search discussed at https://neil.fraser.name/news/2007/10/09/.
+ // See discussion at https://github.com/sergi/go-diff/issues/54.
+ i1 := len(text1)
+ i2 := len(text2)
+ for n := 0; ; n++ {
+ i1--
+ i2--
+ if i1 < 0 || i2 < 0 || text1[i1] != text2[i2] {
+ return n
+ }
+ }
+}
+
+// DiffCommonOverlap determines if the suffix of one string is the prefix of another.
+func (dmp *DiffMatchPatch) DiffCommonOverlap(text1 string, text2 string) int {
+ // Cache the text lengths to prevent multiple calls.
+ text1Length := len(text1)
+ text2Length := len(text2)
+ // Eliminate the null case.
+ if text1Length == 0 || text2Length == 0 {
+ return 0
+ }
+ // Truncate the longer string.
+ if text1Length > text2Length {
+ text1 = text1[text1Length-text2Length:]
+ } else if text1Length < text2Length {
+ text2 = text2[0:text1Length]
+ }
+ textLength := int(math.Min(float64(text1Length), float64(text2Length)))
+ // Quick check for the worst case.
+ if text1 == text2 {
+ return textLength
+ }
+
+ // Start by looking for a single character match and increase length until no match is found. Performance analysis: http://neil.fraser.name/news/2010/11/04/
+ best := 0
+ length := 1
+ for {
+ pattern := text1[textLength-length:]
+ found := strings.Index(text2, pattern)
+ if found == -1 {
+ break
+ }
+ length += found
+ if found == 0 || text1[textLength-length:] == text2[0:length] {
+ best = length
+ length++
+ }
+ }
+
+ return best
+}
+
+// DiffHalfMatch checks whether the two texts share a substring which is at least half the length of the longer text. This speedup can produce non-minimal diffs.
+func (dmp *DiffMatchPatch) DiffHalfMatch(text1, text2 string) []string {
+ // Unused in this code, but retained for interface compatibility.
+ runeSlices := dmp.diffHalfMatch([]rune(text1), []rune(text2))
+ if runeSlices == nil {
+ return nil
+ }
+
+ result := make([]string, len(runeSlices))
+ for i, r := range runeSlices {
+ result[i] = string(r)
+ }
+ return result
+}
+
+func (dmp *DiffMatchPatch) diffHalfMatch(text1, text2 []rune) [][]rune {
+ if dmp.DiffTimeout <= 0 {
+ // Don't risk returning a non-optimal diff if we have unlimited time.
+ return nil
+ }
+
+ var longtext, shorttext []rune
+ if len(text1) > len(text2) {
+ longtext = text1
+ shorttext = text2
+ } else {
+ longtext = text2
+ shorttext = text1
+ }
+
+ if len(longtext) < 4 || len(shorttext)*2 < len(longtext) {
+ return nil // Pointless.
+ }
+
+ // First check if the second quarter is the seed for a half-match.
+ hm1 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+3)/4))
+
+ // Check again based on the third quarter.
+ hm2 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+1)/2))
+
+ hm := [][]rune{}
+ if hm1 == nil && hm2 == nil {
+ return nil
+ } else if hm2 == nil {
+ hm = hm1
+ } else if hm1 == nil {
+ hm = hm2
+ } else {
+ // Both matched. Select the longest.
+ if len(hm1[4]) > len(hm2[4]) {
+ hm = hm1
+ } else {
+ hm = hm2
+ }
+ }
+
+ // A half-match was found, sort out the return data.
+ if len(text1) > len(text2) {
+ return hm
+ }
+
+ return [][]rune{hm[2], hm[3], hm[0], hm[1], hm[4]}
+}
+
+// diffHalfMatchI checks if a substring of shorttext exists within longtext such that the substring is at least half the length of longtext.
+// Returns a slice containing the prefix of longtext, the suffix of longtext, the prefix of shorttext, the suffix of shorttext and the common middle, or nil if there was no match.
+func (dmp *DiffMatchPatch) diffHalfMatchI(l, s []rune, i int) [][]rune {
+ var bestCommonA []rune
+ var bestCommonB []rune
+ var bestCommonLen int
+ var bestLongtextA []rune
+ var bestLongtextB []rune
+ var bestShorttextA []rune
+ var bestShorttextB []rune
+
+ // Start with a 1/4 length substring at position i as a seed.
+ seed := l[i : i+len(l)/4]
+
+ for j := runesIndexOf(s, seed, 0); j != -1; j = runesIndexOf(s, seed, j+1) {
+ prefixLength := commonPrefixLength(l[i:], s[j:])
+ suffixLength := commonSuffixLength(l[:i], s[:j])
+
+ if bestCommonLen < suffixLength+prefixLength {
+ bestCommonA = s[j-suffixLength : j]
+ bestCommonB = s[j : j+prefixLength]
+ bestCommonLen = len(bestCommonA) + len(bestCommonB)
+ bestLongtextA = l[:i-suffixLength]
+ bestLongtextB = l[i+prefixLength:]
+ bestShorttextA = s[:j-suffixLength]
+ bestShorttextB = s[j+prefixLength:]
+ }
+ }
+
+ if bestCommonLen*2 < len(l) {
+ return nil
+ }
+
+ return [][]rune{
+ bestLongtextA,
+ bestLongtextB,
+ bestShorttextA,
+ bestShorttextB,
+ append(bestCommonA, bestCommonB...),
+ }
+}
+
+// DiffCleanupSemantic reduces the number of edits by eliminating semantically trivial equalities.
+func (dmp *DiffMatchPatch) DiffCleanupSemantic(diffs []Diff) []Diff {
+ changes := false
+ // Stack of indices where equalities are found.
+ equalities := make([]int, 0, len(diffs))
+
+ var lastequality string
+ // Always equal to diffs[equalities[equalitiesLength - 1]][1]
+ var pointer int // Index of current position.
+ // Number of characters that changed prior to the equality.
+ var lengthInsertions1, lengthDeletions1 int
+ // Number of characters that changed after the equality.
+ var lengthInsertions2, lengthDeletions2 int
+
+ for pointer < len(diffs) {
+ if diffs[pointer].Type == DiffEqual {
+ // Equality found.
+ equalities = append(equalities, pointer)
+ lengthInsertions1 = lengthInsertions2
+ lengthDeletions1 = lengthDeletions2
+ lengthInsertions2 = 0
+ lengthDeletions2 = 0
+ lastequality = diffs[pointer].Text
+ } else {
+ // An insertion or deletion.
+
+ if diffs[pointer].Type == DiffInsert {
+ lengthInsertions2 += len(diffs[pointer].Text)
+ } else {
+ lengthDeletions2 += len(diffs[pointer].Text)
+ }
+ // Eliminate an equality that is smaller or equal to the edits on both sides of it.
+ difference1 := int(math.Max(float64(lengthInsertions1), float64(lengthDeletions1)))
+ difference2 := int(math.Max(float64(lengthInsertions2), float64(lengthDeletions2)))
+ if len(lastequality) > 0 &&
+ (len(lastequality) <= difference1) &&
+ (len(lastequality) <= difference2) {
+ // Duplicate record.
+ insPoint := equalities[len(equalities)-1]
+ diffs = splice(diffs, insPoint, 0, Diff{DiffDelete, lastequality})
+
+ // Change second copy to insert.
+ diffs[insPoint+1].Type = DiffInsert
+ // Throw away the equality we just deleted.
+ equalities = equalities[:len(equalities)-1]
+
+ if len(equalities) > 0 {
+ equalities = equalities[:len(equalities)-1]
+ }
+ pointer = -1
+ if len(equalities) > 0 {
+ pointer = equalities[len(equalities)-1]
+ }
+
+ lengthInsertions1 = 0 // Reset the counters.
+ lengthDeletions1 = 0
+ lengthInsertions2 = 0
+ lengthDeletions2 = 0
+ lastequality = ""
+ changes = true
+ }
+ }
+ pointer++
+ }
+
+ // Normalize the diff.
+ if changes {
+ diffs = dmp.DiffCleanupMerge(diffs)
+ }
+ diffs = dmp.DiffCleanupSemanticLossless(diffs)
+ // Find any overlaps between deletions and insertions.
+ // e.g: <del>abcxxx</del><ins>xxxdef</ins>
+ // -> <del>abc</del>xxx<ins>def</ins>
+ // e.g: <del>xxxabc</del><ins>defxxx</ins>
+ // -> <ins>def</ins>xxx<del>abc</del>
+ // Only extract an overlap if it is as big as the edit ahead or behind it.
+ pointer = 1
+ for pointer < len(diffs) {
+ if diffs[pointer-1].Type == DiffDelete &&
+ diffs[pointer].Type == DiffInsert {
+ deletion := diffs[pointer-1].Text
+ insertion := diffs[pointer].Text
+ overlapLength1 := dmp.DiffCommonOverlap(deletion, insertion)
+ overlapLength2 := dmp.DiffCommonOverlap(insertion, deletion)
+ if overlapLength1 >= overlapLength2 {
+ if float64(overlapLength1) >= float64(len(deletion))/2 ||
+ float64(overlapLength1) >= float64(len(insertion))/2 {
+
+ // Overlap found. Insert an equality and trim the surrounding edits.
+ diffs = splice(diffs, pointer, 0, Diff{DiffEqual, insertion[:overlapLength1]})
+ diffs[pointer-1].Text =
+ deletion[0 : len(deletion)-overlapLength1]
+ diffs[pointer+1].Text = insertion[overlapLength1:]
+ pointer++
+ }
+ } else {
+ if float64(overlapLength2) >= float64(len(deletion))/2 ||
+ float64(overlapLength2) >= float64(len(insertion))/2 {
+ // Reverse overlap found. Insert an equality and swap and trim the surrounding edits.
+ overlap := Diff{DiffEqual, deletion[:overlapLength2]}
+ diffs = splice(diffs, pointer, 0, overlap)
+ diffs[pointer-1].Type = DiffInsert
+ diffs[pointer-1].Text = insertion[0 : len(insertion)-overlapLength2]
+ diffs[pointer+1].Type = DiffDelete
+ diffs[pointer+1].Text = deletion[overlapLength2:]
+ pointer++
+ }
+ }
+ pointer++
+ }
+ pointer++
+ }
+
+ return diffs
+}
+
+// Define some regex patterns for matching boundaries.
+var (
+ nonAlphaNumericRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
+ whitespaceRegex = regexp.MustCompile(`\s`)
+ linebreakRegex = regexp.MustCompile(`[\r\n]`)
+ blanklineEndRegex = regexp.MustCompile(`\n\r?\n$`)
+ blanklineStartRegex = regexp.MustCompile(`^\r?\n\r?\n`)
+)
+
+// diffCleanupSemanticScore computes a score representing whether the internal boundary falls on logical boundaries.
+// Scores range from 6 (best) to 0 (worst). Closure, but does not reference any external variables.
+func diffCleanupSemanticScore(one, two string) int {
+ if len(one) == 0 || len(two) == 0 {
+ // Edges are the best.
+ return 6
+ }
+
+ // Each port of this function behaves slightly differently due to subtle differences in each language's definition of things like 'whitespace'. Since this function's purpose is largely cosmetic, the choice has been made to use each language's native features rather than force total conformity.
+ rune1, _ := utf8.DecodeLastRuneInString(one)
+ rune2, _ := utf8.DecodeRuneInString(two)
+ char1 := string(rune1)
+ char2 := string(rune2)
+
+ nonAlphaNumeric1 := nonAlphaNumericRegex.MatchString(char1)
+ nonAlphaNumeric2 := nonAlphaNumericRegex.MatchString(char2)
+ whitespace1 := nonAlphaNumeric1 && whitespaceRegex.MatchString(char1)
+ whitespace2 := nonAlphaNumeric2 && whitespaceRegex.MatchString(char2)
+ lineBreak1 := whitespace1 && linebreakRegex.MatchString(char1)
+ lineBreak2 := whitespace2 && linebreakRegex.MatchString(char2)
+ blankLine1 := lineBreak1 && blanklineEndRegex.MatchString(one)
+ blankLine2 := lineBreak2 && blanklineEndRegex.MatchString(two)
+
+ if blankLine1 || blankLine2 {
+ // Five points for blank lines.
+ return 5
+ } else if lineBreak1 || lineBreak2 {
+ // Four points for line breaks.
+ return 4
+ } else if nonAlphaNumeric1 && !whitespace1 && whitespace2 {
+ // Three points for end of sentences.
+ return 3
+ } else if whitespace1 || whitespace2 {
+ // Two points for whitespace.
+ return 2
+ } else if nonAlphaNumeric1 || nonAlphaNumeric2 {
+ // One point for non-alphanumeric.
+ return 1
+ }
+ return 0
+}
+
+// DiffCleanupSemanticLossless looks for single edits surrounded on both sides by equalities which can be shifted sideways to align the edit to a word boundary.
+// E.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
+func (dmp *DiffMatchPatch) DiffCleanupSemanticLossless(diffs []Diff) []Diff {
+ pointer := 1
+
+ // Intentionally ignore the first and last element (don't need checking).
+ for pointer < len(diffs)-1 {
+ if diffs[pointer-1].Type == DiffEqual &&
+ diffs[pointer+1].Type == DiffEqual {
+
+ // This is a single edit surrounded by equalities.
+ equality1 := diffs[pointer-1].Text
+ edit := diffs[pointer].Text
+ equality2 := diffs[pointer+1].Text
+
+ // First, shift the edit as far left as possible.
+ commonOffset := dmp.DiffCommonSuffix(equality1, edit)
+ if commonOffset > 0 {
+ commonString := edit[len(edit)-commonOffset:]
+ equality1 = equality1[0 : len(equality1)-commonOffset]
+ edit = commonString + edit[:len(edit)-commonOffset]
+ equality2 = commonString + equality2
+ }
+
+ // Second, step character by character right, looking for the best fit.
+ bestEquality1 := equality1
+ bestEdit := edit
+ bestEquality2 := equality2
+ bestScore := diffCleanupSemanticScore(equality1, edit) +
+ diffCleanupSemanticScore(edit, equality2)
+
+ for len(edit) != 0 && len(equality2) != 0 {
+ _, sz := utf8.DecodeRuneInString(edit)
+ if len(equality2) < sz || edit[:sz] != equality2[:sz] {
+ break
+ }
+ equality1 += edit[:sz]
+ edit = edit[sz:] + equality2[:sz]
+ equality2 = equality2[sz:]
+ score := diffCleanupSemanticScore(equality1, edit) +
+ diffCleanupSemanticScore(edit, equality2)
+ // The >= encourages trailing rather than leading whitespace on edits.
+ if score >= bestScore {
+ bestScore = score
+ bestEquality1 = equality1
+ bestEdit = edit
+ bestEquality2 = equality2
+ }
+ }
+
+ if diffs[pointer-1].Text != bestEquality1 {
+ // We have an improvement, save it back to the diff.
+ if len(bestEquality1) != 0 {
+ diffs[pointer-1].Text = bestEquality1
+ } else {
+ diffs = splice(diffs, pointer-1, 1)
+ pointer--
+ }
+
+ diffs[pointer].Text = bestEdit
+ if len(bestEquality2) != 0 {
+ diffs[pointer+1].Text = bestEquality2
+ } else {
+ diffs = append(diffs[:pointer+1], diffs[pointer+2:]...)
+ pointer--
+ }
+ }
+ }
+ pointer++
+ }
+
+ return diffs
+}
+
+// DiffCleanupEfficiency reduces the number of edits by eliminating operationally trivial equalities.
+func (dmp *DiffMatchPatch) DiffCleanupEfficiency(diffs []Diff) []Diff {
+ changes := false
+ // Stack of indices where equalities are found.
+ type equality struct {
+ data int
+ next *equality
+ }
+ var equalities *equality
+ // Always equal to equalities[equalitiesLength-1][1]
+ lastequality := ""
+ pointer := 0 // Index of current position.
+ // Is there an insertion operation before the last equality.
+ preIns := false
+ // Is there a deletion operation before the last equality.
+ preDel := false
+ // Is there an insertion operation after the last equality.
+ postIns := false
+ // Is there a deletion operation after the last equality.
+ postDel := false
+ for pointer < len(diffs) {
+ if diffs[pointer].Type == DiffEqual { // Equality found.
+ if len(diffs[pointer].Text) < dmp.DiffEditCost &&
+ (postIns || postDel) {
+ // Candidate found.
+ equalities = &equality{
+ data: pointer,
+ next: equalities,
+ }
+ preIns = postIns
+ preDel = postDel
+ lastequality = diffs[pointer].Text
+ } else {
+ // Not a candidate, and can never become one.
+ equalities = nil
+ lastequality = ""
+ }
+ postIns = false
+ postDel = false
+ } else { // An insertion or deletion.
+ if diffs[pointer].Type == DiffDelete {
+ postDel = true
+ } else {
+ postIns = true
+ }
+
+ // Five types to be split:
+ // <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
+ // <ins>A</ins>X<ins>C</ins><del>D</del>
+ // <ins>A</ins><del>B</del>X<ins>C</ins>
+ // <ins>A</ins>X<ins>C</ins><del>D</del>
+ // <ins>A</ins><del>B</del>X<del>C</del>
+ var sumPres int
+ if preIns {
+ sumPres++
+ }
+ if preDel {
+ sumPres++
+ }
+ if postIns {
+ sumPres++
+ }
+ if postDel {
+ sumPres++
+ }
+ if len(lastequality) > 0 &&
+ ((preIns && preDel && postIns && postDel) ||
+ ((len(lastequality) < dmp.DiffEditCost/2) && sumPres == 3)) {
+
+ insPoint := equalities.data
+
+ // Duplicate record.
+ diffs = splice(diffs, insPoint, 0, Diff{DiffDelete, lastequality})
+
+ // Change second copy to insert.
+ diffs[insPoint+1].Type = DiffInsert
+ // Throw away the equality we just deleted.
+ equalities = equalities.next
+ lastequality = ""
+
+ if preIns && preDel {
+ // No changes made which could affect previous entry, keep going.
+ postIns = true
+ postDel = true
+ equalities = nil
+ } else {
+ if equalities != nil {
+ equalities = equalities.next
+ }
+ if equalities != nil {
+ pointer = equalities.data
+ } else {
+ pointer = -1
+ }
+ postIns = false
+ postDel = false
+ }
+ changes = true
+ }
+ }
+ pointer++
+ }
+
+ if changes {
+ diffs = dmp.DiffCleanupMerge(diffs)
+ }
+
+ return diffs
+}
+
+// DiffCleanupMerge reorders and merges like edit sections. Merge equalities.
+// Any edit section can move as long as it doesn't cross an equality.
+func (dmp *DiffMatchPatch) DiffCleanupMerge(diffs []Diff) []Diff {
+ // Add a dummy entry at the end.
+ diffs = append(diffs, Diff{DiffEqual, ""})
+ pointer := 0
+ countDelete := 0
+ countInsert := 0
+ commonlength := 0
+ textDelete := []rune(nil)
+ textInsert := []rune(nil)
+
+ for pointer < len(diffs) {
+ switch diffs[pointer].Type {
+ case DiffInsert:
+ countInsert++
+ textInsert = append(textInsert, []rune(diffs[pointer].Text)...)
+ pointer++
+ break
+ case DiffDelete:
+ countDelete++
+ textDelete = append(textDelete, []rune(diffs[pointer].Text)...)
+ pointer++
+ break
+ case DiffEqual:
+ // Upon reaching an equality, check for prior redundancies.
+ if countDelete+countInsert > 1 {
+ if countDelete != 0 && countInsert != 0 {
+ // Factor out any common prefixes.
+ commonlength = commonPrefixLength(textInsert, textDelete)
+ if commonlength != 0 {
+ x := pointer - countDelete - countInsert
+ if x > 0 && diffs[x-1].Type == DiffEqual {
+ diffs[x-1].Text += string(textInsert[:commonlength])
+ } else {
+ diffs = append([]Diff{Diff{DiffEqual, string(textInsert[:commonlength])}}, diffs...)
+ pointer++
+ }
+ textInsert = textInsert[commonlength:]
+ textDelete = textDelete[commonlength:]
+ }
+ // Factor out any common suffixes.
+ commonlength = commonSuffixLength(textInsert, textDelete)
+ if commonlength != 0 {
+ insertIndex := len(textInsert) - commonlength
+ deleteIndex := len(textDelete) - commonlength
+ diffs[pointer].Text = string(textInsert[insertIndex:]) + diffs[pointer].Text
+ textInsert = textInsert[:insertIndex]
+ textDelete = textDelete[:deleteIndex]
+ }
+ }
+ // Delete the offending records and add the merged ones.
+ if countDelete == 0 {
+ diffs = splice(diffs, pointer-countInsert,
+ countDelete+countInsert,
+ Diff{DiffInsert, string(textInsert)})
+ } else if countInsert == 0 {
+ diffs = splice(diffs, pointer-countDelete,
+ countDelete+countInsert,
+ Diff{DiffDelete, string(textDelete)})
+ } else {
+ diffs = splice(diffs, pointer-countDelete-countInsert,
+ countDelete+countInsert,
+ Diff{DiffDelete, string(textDelete)},
+ Diff{DiffInsert, string(textInsert)})
+ }
+
+ pointer = pointer - countDelete - countInsert + 1
+ if countDelete != 0 {
+ pointer++
+ }
+ if countInsert != 0 {
+ pointer++
+ }
+ } else if pointer != 0 && diffs[pointer-1].Type == DiffEqual {
+ // Merge this equality with the previous one.
+ diffs[pointer-1].Text += diffs[pointer].Text
+ diffs = append(diffs[:pointer], diffs[pointer+1:]...)
+ } else {
+ pointer++
+ }
+ countInsert = 0
+ countDelete = 0
+ textDelete = nil
+ textInsert = nil
+ break
+ }
+ }
+
+ if len(diffs[len(diffs)-1].Text) == 0 {
+ diffs = diffs[0 : len(diffs)-1] // Remove the dummy entry at the end.
+ }
+
+ // Second pass: look for single edits surrounded on both sides by equalities which can be shifted sideways to eliminate an equality. E.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
+ changes := false
+ pointer = 1
+ // Intentionally ignore the first and last element (don't need checking).
+ for pointer < (len(diffs) - 1) {
+ if diffs[pointer-1].Type == DiffEqual &&
+ diffs[pointer+1].Type == DiffEqual {
+ // This is a single edit surrounded by equalities.
+ if strings.HasSuffix(diffs[pointer].Text, diffs[pointer-1].Text) {
+ // Shift the edit over the previous equality.
+ diffs[pointer].Text = diffs[pointer-1].Text +
+ diffs[pointer].Text[:len(diffs[pointer].Text)-len(diffs[pointer-1].Text)]
+ diffs[pointer+1].Text = diffs[pointer-1].Text + diffs[pointer+1].Text
+ diffs = splice(diffs, pointer-1, 1)
+ changes = true
+ } else if strings.HasPrefix(diffs[pointer].Text, diffs[pointer+1].Text) {
+ // Shift the edit over the next equality.
+ diffs[pointer-1].Text += diffs[pointer+1].Text
+ diffs[pointer].Text =
+ diffs[pointer].Text[len(diffs[pointer+1].Text):] + diffs[pointer+1].Text
+ diffs = splice(diffs, pointer+1, 1)
+ changes = true
+ }
+ }
+ pointer++
+ }
+
+ // If shifts were made, the diff needs reordering and another shift sweep.
+ if changes {
+ diffs = dmp.DiffCleanupMerge(diffs)
+ }
+
+ return diffs
+}
+
+// DiffXIndex returns the equivalent location in s2.
+func (dmp *DiffMatchPatch) DiffXIndex(diffs []Diff, loc int) int {
+ chars1 := 0
+ chars2 := 0
+ lastChars1 := 0
+ lastChars2 := 0
+ lastDiff := Diff{}
+ for i := 0; i < len(diffs); i++ {
+ aDiff := diffs[i]
+ if aDiff.Type != DiffInsert {
+ // Equality or deletion.
+ chars1 += len(aDiff.Text)
+ }
+ if aDiff.Type != DiffDelete {
+ // Equality or insertion.
+ chars2 += len(aDiff.Text)
+ }
+ if chars1 > loc {
+ // Overshot the location.
+ lastDiff = aDiff
+ break
+ }
+ lastChars1 = chars1
+ lastChars2 = chars2
+ }
+ if lastDiff.Type == DiffDelete {
+ // The location was deleted.
+ return lastChars2
+ }
+ // Add the remaining character length.
+ return lastChars2 + (loc - lastChars1)
+}
+
+// DiffPrettyHtml converts a []Diff into a pretty HTML report.
+// It is intended as an example from which to write one's own display functions.
+func (dmp *DiffMatchPatch) DiffPrettyHtml(diffs []Diff) string {
+ var buff bytes.Buffer
+ for _, diff := range diffs {
+ text := strings.Replace(html.EscapeString(diff.Text), "\n", "&para;<br>", -1)
+ switch diff.Type {
+ case DiffInsert:
+ _, _ = buff.WriteString("<ins style=\"background:#e6ffe6;\">")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("</ins>")
+ case DiffDelete:
+ _, _ = buff.WriteString("<del style=\"background:#ffe6e6;\">")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("</del>")
+ case DiffEqual:
+ _, _ = buff.WriteString("<span>")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("</span>")
+ }
+ }
+ return buff.String()
+}
+
+// DiffPrettyText converts a []Diff into a colored text report.
+func (dmp *DiffMatchPatch) DiffPrettyText(diffs []Diff) string {
+ var buff bytes.Buffer
+ for _, diff := range diffs {
+ text := diff.Text
+
+ switch diff.Type {
+ case DiffInsert:
+ _, _ = buff.WriteString("\x1b[32m")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("\x1b[0m")
+ case DiffDelete:
+ _, _ = buff.WriteString("\x1b[31m")
+ _, _ = buff.WriteString(text)
+ _, _ = buff.WriteString("\x1b[0m")
+ case DiffEqual:
+ _, _ = buff.WriteString(text)
+ }
+ }
+
+ return buff.String()
+}
+
+// DiffText1 computes and returns the source text (all equalities and deletions).
+func (dmp *DiffMatchPatch) DiffText1(diffs []Diff) string {
+ //StringBuilder text = new StringBuilder()
+ var text bytes.Buffer
+
+ for _, aDiff := range diffs {
+ if aDiff.Type != DiffInsert {
+ _, _ = text.WriteString(aDiff.Text)
+ }
+ }
+ return text.String()
+}
+
+// DiffText2 computes and returns the destination text (all equalities and insertions).
+func (dmp *DiffMatchPatch) DiffText2(diffs []Diff) string {
+ var text bytes.Buffer
+
+ for _, aDiff := range diffs {
+ if aDiff.Type != DiffDelete {
+ _, _ = text.WriteString(aDiff.Text)
+ }
+ }
+ return text.String()
+}
+
+// DiffLevenshtein computes the Levenshtein distance that is the number of inserted, deleted or substituted characters.
+func (dmp *DiffMatchPatch) DiffLevenshtein(diffs []Diff) int {
+ levenshtein := 0
+ insertions := 0
+ deletions := 0
+
+ for _, aDiff := range diffs {
+ switch aDiff.Type {
+ case DiffInsert:
+ insertions += utf8.RuneCountInString(aDiff.Text)
+ case DiffDelete:
+ deletions += utf8.RuneCountInString(aDiff.Text)
+ case DiffEqual:
+ // A deletion and an insertion is one substitution.
+ levenshtein += max(insertions, deletions)
+ insertions = 0
+ deletions = 0
+ }
+ }
+
+ levenshtein += max(insertions, deletions)
+ return levenshtein
+}
+
+// DiffToDelta crushes the diff into an encoded string which describes the operations required to transform text1 into text2.
+// E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation.
+func (dmp *DiffMatchPatch) DiffToDelta(diffs []Diff) string {
+ var text bytes.Buffer
+ for _, aDiff := range diffs {
+ switch aDiff.Type {
+ case DiffInsert:
+ _, _ = text.WriteString("+")
+ _, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
+ _, _ = text.WriteString("\t")
+ break
+ case DiffDelete:
+ _, _ = text.WriteString("-")
+ _, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
+ _, _ = text.WriteString("\t")
+ break
+ case DiffEqual:
+ _, _ = text.WriteString("=")
+ _, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
+ _, _ = text.WriteString("\t")
+ break
+ }
+ }
+ delta := text.String()
+ if len(delta) != 0 {
+ // Strip off trailing tab character.
+ delta = delta[0 : utf8.RuneCountInString(delta)-1]
+ delta = unescaper.Replace(delta)
+ }
+ return delta
+}
+
+// DiffFromDelta, given the original text1 and an encoded string which describes the operations required to transform text1 into text2, computes the full diff.
+func (dmp *DiffMatchPatch) DiffFromDelta(text1 string, delta string) (diffs []Diff, err error) {
+ i := 0
+ runes := []rune(text1)
+
+ for _, token := range strings.Split(delta, "\t") {
+ if len(token) == 0 {
+ // Blank tokens are ok (from a trailing \t).
+ continue
+ }
+
+ // Each token begins with a one character parameter which specifies the operation of this token (delete, insert, equality).
+ param := token[1:]
+
+ switch op := token[0]; op {
+ case '+':
+ // Decode would change all "+" to " "
+ param = strings.Replace(param, "+", "%2b", -1)
+ param, err = url.QueryUnescape(param)
+ if err != nil {
+ return nil, err
+ }
+ if !utf8.ValidString(param) {
+ return nil, fmt.Errorf("invalid UTF-8 token: %q", param)
+ }
+
+ diffs = append(diffs, Diff{DiffInsert, param})
+ case '=', '-':
+ n, err := strconv.ParseInt(param, 10, 0)
+ if err != nil {
+ return nil, err
+ } else if n < 0 {
+ return nil, errors.New("Negative number in DiffFromDelta: " + param)
+ }
+
+ i += int(n)
+ // Break out if we are out of bounds, go1.6 can't handle this very well
+ if i > len(runes) {
+ break
+ }
+ // Remember that string slicing is by byte - we want by rune here.
+ text := string(runes[i-int(n) : i])
+
+ if op == '=' {
+ diffs = append(diffs, Diff{DiffEqual, text})
+ } else {
+ diffs = append(diffs, Diff{DiffDelete, text})
+ }
+ default:
+ // Anything else is an error.
+ return nil, errors.New("Invalid diff operation in DiffFromDelta: " + string(token[0]))
+ }
+ }
+
+ if i != len(runes) {
+ return nil, fmt.Errorf("Delta length (%v) is different from source text length (%v)", i, len(text1))
+ }
+
+ return diffs, nil
+}
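
Aside: the diff.go vendored above exposes the whole diff pipeline in one place — DiffMain, the cleanup passes, and the tab-separated delta encoding. The following is only a minimal usage sketch, assuming the upstream import path github.com/sergi/go-diff/diffmatchpatch (rather than the vendored internal path) and arbitrary sample strings:

package main

import (
	"fmt"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func main() {
	dmp := diffmatchpatch.New()

	// Character-level diff, then collapse semantically trivial equalities.
	diffs := dmp.DiffMain("crawlab spider", "crawlab crawler", false)
	diffs = dmp.DiffCleanupSemantic(diffs)
	fmt.Println(dmp.DiffPrettyText(diffs))

	// Round-trip through the compact delta encoding produced by DiffToDelta.
	delta := dmp.DiffToDelta(diffs)
	restored, err := dmp.DiffFromDelta("crawlab spider", delta)
	if err != nil {
		panic(err)
	}
	fmt.Println(dmp.DiffText2(restored)) // reconstructs the second text
}
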
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/diffmatchpatch.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/diffmatchpatch.go
new file mode 100644
index 00000000..d3acc32c
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/diffmatchpatch.go
@@ -0,0 +1,46 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+// Package diffmatchpatch offers robust algorithms to perform the operations required for synchronizing plain text.
+package diffmatchpatch
+
+import (
+ "time"
+)
+
+// DiffMatchPatch holds the configuration for diff-match-patch operations.
+type DiffMatchPatch struct {
+ // Number of seconds to map a diff before giving up (0 for infinity).
+ DiffTimeout time.Duration
+ // Cost of an empty edit operation in terms of edit characters.
+ DiffEditCost int
+ // How far to search for a match (0 = exact location, 1000+ = broad match). A match this many characters away from the expected location will add 1.0 to the score (0.0 is a perfect match).
+ MatchDistance int
+ // When deleting a large block of text (over ~64 characters), how close do the contents have to be to match the expected contents. (0.0 = perfection, 1.0 = very loose). Note that MatchThreshold controls how closely the end points of a delete need to match.
+ PatchDeleteThreshold float64
+ // Chunk size for context length.
+ PatchMargin int
+ // The number of bits in an int.
+ MatchMaxBits int
+ // At what point is no match declared (0.0 = perfection, 1.0 = very loose).
+ MatchThreshold float64
+}
+
+// New creates a new DiffMatchPatch object with default parameters.
+func New() *DiffMatchPatch {
+ // Defaults.
+ return &DiffMatchPatch{
+ DiffTimeout: time.Second,
+ DiffEditCost: 4,
+ MatchThreshold: 0.5,
+ MatchDistance: 1000,
+ PatchDeleteThreshold: 0.5,
+ PatchMargin: 4,
+ MatchMaxBits: 32,
+ }
+}
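
Aside: New() above only fills in defaults; the DiffMatchPatch fields are plain knobs that can be overridden after construction. A small sketch with hypothetical values, again assuming the upstream import path:

package main

import (
	"time"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func main() {
	dmp := diffmatchpatch.New()
	dmp.DiffTimeout = 5 * time.Second // spend longer before falling back to a coarse diff
	dmp.DiffEditCost = 6              // DiffCleanupEfficiency collapses more small equalities
	dmp.MatchThreshold = 0.3          // MatchMain/MatchBitap must find closer matches (0.0 = exact)

	_ = dmp.DiffMain("old text", "new text", false)
}
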
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/match.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/match.go
new file mode 100644
index 00000000..17374e10
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/match.go
@@ -0,0 +1,160 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "math"
+)
+
+// MatchMain locates the best instance of 'pattern' in 'text' near 'loc'.
+// Returns -1 if no match found.
+func (dmp *DiffMatchPatch) MatchMain(text, pattern string, loc int) int {
+ // Check for null inputs not needed since null can't be passed in C#.
+
+ loc = int(math.Max(0, math.Min(float64(loc), float64(len(text)))))
+ if text == pattern {
+ // Shortcut (potentially not guaranteed by the algorithm)
+ return 0
+ } else if len(text) == 0 {
+ // Nothing to match.
+ return -1
+ } else if loc+len(pattern) <= len(text) && text[loc:loc+len(pattern)] == pattern {
+ // Perfect match at the perfect spot! (Includes case of null pattern)
+ return loc
+ }
+ // Do a fuzzy compare.
+ return dmp.MatchBitap(text, pattern, loc)
+}
+
+// MatchBitap locates the best instance of 'pattern' in 'text' near 'loc' using the Bitap algorithm.
+// Returns -1 if no match was found.
+func (dmp *DiffMatchPatch) MatchBitap(text, pattern string, loc int) int {
+ // Initialise the alphabet.
+ s := dmp.MatchAlphabet(pattern)
+
+ // Highest score beyond which we give up.
+ scoreThreshold := dmp.MatchThreshold
+ // Is there a nearby exact match? (speedup)
+ bestLoc := indexOf(text, pattern, loc)
+ if bestLoc != -1 {
+ scoreThreshold = math.Min(dmp.matchBitapScore(0, bestLoc, loc,
+ pattern), scoreThreshold)
+ // What about in the other direction? (speedup)
+ bestLoc = lastIndexOf(text, pattern, loc+len(pattern))
+ if bestLoc != -1 {
+ scoreThreshold = math.Min(dmp.matchBitapScore(0, bestLoc, loc,
+ pattern), scoreThreshold)
+ }
+ }
+
+ // Initialise the bit arrays.
+ matchmask := 1 << uint((len(pattern) - 1))
+ bestLoc = -1
+
+ var binMin, binMid int
+ binMax := len(pattern) + len(text)
+ lastRd := []int{}
+ for d := 0; d < len(pattern); d++ {
+ // Scan for the best match; each iteration allows for one more error. Run a binary search to determine how far from 'loc' we can stray at this error level.
+ binMin = 0
+ binMid = binMax
+ for binMin < binMid {
+ if dmp.matchBitapScore(d, loc+binMid, loc, pattern) <= scoreThreshold {
+ binMin = binMid
+ } else {
+ binMax = binMid
+ }
+ binMid = (binMax-binMin)/2 + binMin
+ }
+ // Use the result from this iteration as the maximum for the next.
+ binMax = binMid
+ start := int(math.Max(1, float64(loc-binMid+1)))
+ finish := int(math.Min(float64(loc+binMid), float64(len(text))) + float64(len(pattern)))
+
+ rd := make([]int, finish+2)
+ rd[finish+1] = (1 << uint(d)) - 1
+
+ for j := finish; j >= start; j-- {
+ var charMatch int
+ if len(text) <= j-1 {
+ // Out of range.
+ charMatch = 0
+ } else if _, ok := s[text[j-1]]; !ok {
+ charMatch = 0
+ } else {
+ charMatch = s[text[j-1]]
+ }
+
+ if d == 0 {
+ // First pass: exact match.
+ rd[j] = ((rd[j+1] << 1) | 1) & charMatch
+ } else {
+ // Subsequent passes: fuzzy match.
+ rd[j] = ((rd[j+1]<<1)|1)&charMatch | (((lastRd[j+1] | lastRd[j]) << 1) | 1) | lastRd[j+1]
+ }
+ if (rd[j] & matchmask) != 0 {
+ score := dmp.matchBitapScore(d, j-1, loc, pattern)
+ // This match will almost certainly be better than any existing match. But check anyway.
+ if score <= scoreThreshold {
+ // Told you so.
+ scoreThreshold = score
+ bestLoc = j - 1
+ if bestLoc > loc {
+ // When passing loc, don't exceed our current distance from loc.
+ start = int(math.Max(1, float64(2*loc-bestLoc)))
+ } else {
+ // Already passed loc, downhill from here on in.
+ break
+ }
+ }
+ }
+ }
+ if dmp.matchBitapScore(d+1, loc, loc, pattern) > scoreThreshold {
+ // No hope for a (better) match at greater error levels.
+ break
+ }
+ lastRd = rd
+ }
+ return bestLoc
+}
+
+// matchBitapScore computes and returns the score for a match with e errors and x location.
+func (dmp *DiffMatchPatch) matchBitapScore(e, x, loc int, pattern string) float64 {
+ accuracy := float64(e) / float64(len(pattern))
+ proximity := math.Abs(float64(loc - x))
+ if dmp.MatchDistance == 0 {
+ // Dodge divide by zero error.
+ if proximity == 0 {
+ return accuracy
+ }
+
+ return 1.0
+ }
+ return accuracy + (proximity / float64(dmp.MatchDistance))
+}
+
+// MatchAlphabet initialises the alphabet for the Bitap algorithm.
+func (dmp *DiffMatchPatch) MatchAlphabet(pattern string) map[byte]int {
+ s := map[byte]int{}
+ charPattern := []byte(pattern)
+ for _, c := range charPattern {
+ _, ok := s[c]
+ if !ok {
+ s[c] = 0
+ }
+ }
+ i := 0
+
+ for _, c := range charPattern {
+ value := s[c] | int(uint(1)<<uint((len(pattern)-i-1)))
+ s[c] = value
+ i++
+ }
+
+ return s
+}
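
Aside: a short sketch of the fuzzy matcher in match.go above — MatchMain falls back to MatchBitap, so a slightly misspelled pattern can still be located near the expected offset. Upstream import path assumed; the text, pattern and offset are arbitrary:

package main

import (
	"fmt"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func main() {
	dmp := diffmatchpatch.New()

	text := "the quick brown fox jumps over the lazy dog"
	// One-character error in the pattern; Bitap tolerates it within MatchThreshold.
	loc := dmp.MatchMain(text, "lazi dog", 30)
	fmt.Println(loc) // index of the best fuzzy match, or -1 if nothing clears the threshold
}
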
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/operation_string.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/operation_string.go
new file mode 100644
index 00000000..533ec0da
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/operation_string.go
@@ -0,0 +1,17 @@
+// Code generated by "stringer -type=Operation -trimprefix=Diff"; DO NOT EDIT.
+
+package diffmatchpatch
+
+import "fmt"
+
+const _Operation_name = "DeleteEqualInsert"
+
+var _Operation_index = [...]uint8{0, 6, 11, 17}
+
+func (i Operation) String() string {
+ i -= -1
+ if i < 0 || i >= Operation(len(_Operation_index)-1) {
+ return fmt.Sprintf("Operation(%d)", i+-1)
+ }
+ return _Operation_name[_Operation_index[i]:_Operation_index[i+1]]
+}
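
Aside: before the vendored patch.go diff below, a minimal sketch of the patch workflow it implements — PatchMake builds patches from two texts and PatchApply replays them, returning per-patch success flags. Upstream import path assumed; sample strings are arbitrary:

package main

import (
	"fmt"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func main() {
	dmp := diffmatchpatch.New()

	text1 := "Crawlab is a distributed crawler management platform."
	text2 := "Crawlab is a distributed spider management platform."

	patches := dmp.PatchMake(text1, text2)
	result, applied := dmp.PatchApply(patches, text1)

	fmt.Println(result)  // should equal text2 when every patch applies cleanly
	fmt.Println(applied) // one bool per patch
}
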
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/patch.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/patch.go
new file mode 100644
index 00000000..223c43c4
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/patch.go
@@ -0,0 +1,556 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "bytes"
+ "errors"
+ "math"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// Patch represents one patch operation.
+type Patch struct {
+ diffs []Diff
+ Start1 int
+ Start2 int
+ Length1 int
+ Length2 int
+}
+
+// String emulates GNU diff's format.
+// Header: @@ -382,8 +481,9 @@
+// Indices are printed as 1-based, not 0-based.
+func (p *Patch) String() string {
+ var coords1, coords2 string
+
+ if p.Length1 == 0 {
+ coords1 = strconv.Itoa(p.Start1) + ",0"
+ } else if p.Length1 == 1 {
+ coords1 = strconv.Itoa(p.Start1 + 1)
+ } else {
+ coords1 = strconv.Itoa(p.Start1+1) + "," + strconv.Itoa(p.Length1)
+ }
+
+ if p.Length2 == 0 {
+ coords2 = strconv.Itoa(p.Start2) + ",0"
+ } else if p.Length2 == 1 {
+ coords2 = strconv.Itoa(p.Start2 + 1)
+ } else {
+ coords2 = strconv.Itoa(p.Start2+1) + "," + strconv.Itoa(p.Length2)
+ }
+
+ var text bytes.Buffer
+ _, _ = text.WriteString("@@ -" + coords1 + " +" + coords2 + " @@\n")
+
+ // Escape the body of the patch with %xx notation.
+ for _, aDiff := range p.diffs {
+ switch aDiff.Type {
+ case DiffInsert:
+ _, _ = text.WriteString("+")
+ case DiffDelete:
+ _, _ = text.WriteString("-")
+ case DiffEqual:
+ _, _ = text.WriteString(" ")
+ }
+
+ _, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
+ _, _ = text.WriteString("\n")
+ }
+
+ return unescaper.Replace(text.String())
+}
+
+// PatchAddContext increases the context until it is unique, but doesn't let the pattern expand beyond MatchMaxBits.
+func (dmp *DiffMatchPatch) PatchAddContext(patch Patch, text string) Patch {
+ if len(text) == 0 {
+ return patch
+ }
+
+ pattern := text[patch.Start2 : patch.Start2+patch.Length1]
+ padding := 0
+
+ // Look for the first and last matches of pattern in text. If two different matches are found, increase the pattern length.
+ for strings.Index(text, pattern) != strings.LastIndex(text, pattern) &&
+ len(pattern) < dmp.MatchMaxBits-2*dmp.PatchMargin {
+ padding += dmp.PatchMargin
+ maxStart := max(0, patch.Start2-padding)
+ minEnd := min(len(text), patch.Start2+patch.Length1+padding)
+ pattern = text[maxStart:minEnd]
+ }
+ // Add one chunk for good luck.
+ padding += dmp.PatchMargin
+
+ // Add the prefix.
+ prefix := text[max(0, patch.Start2-padding):patch.Start2]
+ if len(prefix) != 0 {
+ patch.diffs = append([]Diff{Diff{DiffEqual, prefix}}, patch.diffs...)
+ }
+ // Add the suffix.
+ suffix := text[patch.Start2+patch.Length1 : min(len(text), patch.Start2+patch.Length1+padding)]
+ if len(suffix) != 0 {
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, suffix})
+ }
+
+ // Roll back the start points.
+ patch.Start1 -= len(prefix)
+ patch.Start2 -= len(prefix)
+ // Extend the lengths.
+ patch.Length1 += len(prefix) + len(suffix)
+ patch.Length2 += len(prefix) + len(suffix)
+
+ return patch
+}
+
+// PatchMake computes a list of patches.
+func (dmp *DiffMatchPatch) PatchMake(opt ...interface{}) []Patch {
+ if len(opt) == 1 {
+ diffs, _ := opt[0].([]Diff)
+ text1 := dmp.DiffText1(diffs)
+ return dmp.PatchMake(text1, diffs)
+ } else if len(opt) == 2 {
+ text1 := opt[0].(string)
+ switch t := opt[1].(type) {
+ case string:
+ diffs := dmp.DiffMain(text1, t, true)
+ if len(diffs) > 2 {
+ diffs = dmp.DiffCleanupSemantic(diffs)
+ diffs = dmp.DiffCleanupEfficiency(diffs)
+ }
+ return dmp.PatchMake(text1, diffs)
+ case []Diff:
+ return dmp.patchMake2(text1, t)
+ }
+ } else if len(opt) == 3 {
+ return dmp.PatchMake(opt[0], opt[2])
+ }
+ return []Patch{}
+}
+
+// patchMake2 computes a list of patches to turn text1 into text2.
+// text2 is not provided, diffs are the delta between text1 and text2.
+func (dmp *DiffMatchPatch) patchMake2(text1 string, diffs []Diff) []Patch {
+ // Check for null inputs not needed since null can't be passed in C#.
+ patches := []Patch{}
+ if len(diffs) == 0 {
+ return patches // Get rid of the null case.
+ }
+
+ patch := Patch{}
+ charCount1 := 0 // Number of characters into the text1 string.
+ charCount2 := 0 // Number of characters into the text2 string.
+ // Start with text1 (prepatchText) and apply the diffs until we arrive at text2 (postpatchText). We recreate the patches one by one to determine context info.
+ prepatchText := text1
+ postpatchText := text1
+
+ for i, aDiff := range diffs {
+ if len(patch.diffs) == 0 && aDiff.Type != DiffEqual {
+ // A new patch starts here.
+ patch.Start1 = charCount1
+ patch.Start2 = charCount2
+ }
+
+ switch aDiff.Type {
+ case DiffInsert:
+ patch.diffs = append(patch.diffs, aDiff)
+ patch.Length2 += len(aDiff.Text)
+ postpatchText = postpatchText[:charCount2] +
+ aDiff.Text + postpatchText[charCount2:]
+ case DiffDelete:
+ patch.Length1 += len(aDiff.Text)
+ patch.diffs = append(patch.diffs, aDiff)
+ postpatchText = postpatchText[:charCount2] + postpatchText[charCount2+len(aDiff.Text):]
+ case DiffEqual:
+ if len(aDiff.Text) <= 2*dmp.PatchMargin &&
+ len(patch.diffs) != 0 && i != len(diffs)-1 {
+ // Small equality inside a patch.
+ patch.diffs = append(patch.diffs, aDiff)
+ patch.Length1 += len(aDiff.Text)
+ patch.Length2 += len(aDiff.Text)
+ }
+ if len(aDiff.Text) >= 2*dmp.PatchMargin {
+ // Time for a new patch.
+ if len(patch.diffs) != 0 {
+ patch = dmp.PatchAddContext(patch, prepatchText)
+ patches = append(patches, patch)
+ patch = Patch{}
+ // Unlike Unidiff, our patch lists have a rolling context. http://code.google.com/p/google-diff-match-patch/wiki/Unidiff Update prepatch text & pos to reflect the application of the just completed patch.
+ prepatchText = postpatchText
+ charCount1 = charCount2
+ }
+ }
+ }
+
+ // Update the current character count.
+ if aDiff.Type != DiffInsert {
+ charCount1 += len(aDiff.Text)
+ }
+ if aDiff.Type != DiffDelete {
+ charCount2 += len(aDiff.Text)
+ }
+ }
+
+ // Pick up the leftover patch if not empty.
+ if len(patch.diffs) != 0 {
+ patch = dmp.PatchAddContext(patch, prepatchText)
+ patches = append(patches, patch)
+ }
+
+ return patches
+}
+
+// PatchDeepCopy returns an array that is identical to a given array of patches.
+func (dmp *DiffMatchPatch) PatchDeepCopy(patches []Patch) []Patch {
+ patchesCopy := []Patch{}
+ for _, aPatch := range patches {
+ patchCopy := Patch{}
+ for _, aDiff := range aPatch.diffs {
+ patchCopy.diffs = append(patchCopy.diffs, Diff{
+ aDiff.Type,
+ aDiff.Text,
+ })
+ }
+ patchCopy.Start1 = aPatch.Start1
+ patchCopy.Start2 = aPatch.Start2
+ patchCopy.Length1 = aPatch.Length1
+ patchCopy.Length2 = aPatch.Length2
+ patchesCopy = append(patchesCopy, patchCopy)
+ }
+ return patchesCopy
+}
+
+// PatchApply merges a set of patches onto the text. Returns a patched text, as well as an array of true/false values indicating which patches were applied.
+func (dmp *DiffMatchPatch) PatchApply(patches []Patch, text string) (string, []bool) {
+ if len(patches) == 0 {
+ return text, []bool{}
+ }
+
+ // Deep copy the patches so that no changes are made to originals.
+ patches = dmp.PatchDeepCopy(patches)
+
+ nullPadding := dmp.PatchAddPadding(patches)
+ text = nullPadding + text + nullPadding
+ patches = dmp.PatchSplitMax(patches)
+
+ x := 0
+ // delta keeps track of the offset between the expected and actual location of the previous patch. If there are patches expected at positions 10 and 20, but the first patch was found at 12, delta is 2 and the second patch has an effective expected position of 22.
+ delta := 0
+ results := make([]bool, len(patches))
+ for _, aPatch := range patches {
+ expectedLoc := aPatch.Start2 + delta
+ text1 := dmp.DiffText1(aPatch.diffs)
+ var startLoc int
+ endLoc := -1
+ if len(text1) > dmp.MatchMaxBits {
+ // PatchSplitMax will only provide an oversized pattern in the case of a monster delete.
+ startLoc = dmp.MatchMain(text, text1[:dmp.MatchMaxBits], expectedLoc)
+ if startLoc != -1 {
+ endLoc = dmp.MatchMain(text,
+ text1[len(text1)-dmp.MatchMaxBits:], expectedLoc+len(text1)-dmp.MatchMaxBits)
+ if endLoc == -1 || startLoc >= endLoc {
+ // Can't find valid trailing context. Drop this patch.
+ startLoc = -1
+ }
+ }
+ } else {
+ startLoc = dmp.MatchMain(text, text1, expectedLoc)
+ }
+ if startLoc == -1 {
+ // No match found. :(
+ results[x] = false
+ // Subtract the delta for this failed patch from subsequent patches.
+ delta -= aPatch.Length2 - aPatch.Length1
+ } else {
+ // Found a match. :)
+ results[x] = true
+ delta = startLoc - expectedLoc
+ var text2 string
+ if endLoc == -1 {
+ text2 = text[startLoc:int(math.Min(float64(startLoc+len(text1)), float64(len(text))))]
+ } else {
+ text2 = text[startLoc:int(math.Min(float64(endLoc+dmp.MatchMaxBits), float64(len(text))))]
+ }
+ if text1 == text2 {
+ // Perfect match, just shove the Replacement text in.
+ text = text[:startLoc] + dmp.DiffText2(aPatch.diffs) + text[startLoc+len(text1):]
+ } else {
+ // Imperfect match. Run a diff to get a framework of equivalent indices.
+ diffs := dmp.DiffMain(text1, text2, false)
+ if len(text1) > dmp.MatchMaxBits && float64(dmp.DiffLevenshtein(diffs))/float64(len(text1)) > dmp.PatchDeleteThreshold {
+ // The end points match, but the content is unacceptably bad.
+ results[x] = false
+ } else {
+ diffs = dmp.DiffCleanupSemanticLossless(diffs)
+ index1 := 0
+ for _, aDiff := range aPatch.diffs {
+ if aDiff.Type != DiffEqual {
+ index2 := dmp.DiffXIndex(diffs, index1)
+ if aDiff.Type == DiffInsert {
+ // Insertion
+ text = text[:startLoc+index2] + aDiff.Text + text[startLoc+index2:]
+ } else if aDiff.Type == DiffDelete {
+ // Deletion
+ startIndex := startLoc + index2
+ text = text[:startIndex] +
+ text[startIndex+dmp.DiffXIndex(diffs, index1+len(aDiff.Text))-index2:]
+ }
+ }
+ if aDiff.Type != DiffDelete {
+ index1 += len(aDiff.Text)
+ }
+ }
+ }
+ }
+ }
+ x++
+ }
+ // Strip the padding off.
+ text = text[len(nullPadding) : len(nullPadding)+(len(text)-2*len(nullPadding))]
+ return text, results
+}
+
+// PatchAddPadding adds some padding on text start and end so that edges can match something.
+// Intended to be called only from within PatchApply.
+func (dmp *DiffMatchPatch) PatchAddPadding(patches []Patch) string {
+ paddingLength := dmp.PatchMargin
+ nullPadding := ""
+ for x := 1; x <= paddingLength; x++ {
+ nullPadding += string(x)
+ }
+
+ // Bump all the patches forward.
+ for i := range patches {
+ patches[i].Start1 += paddingLength
+ patches[i].Start2 += paddingLength
+ }
+
+ // Add some padding on start of first diff.
+ if len(patches[0].diffs) == 0 || patches[0].diffs[0].Type != DiffEqual {
+ // Add nullPadding equality.
+ patches[0].diffs = append([]Diff{Diff{DiffEqual, nullPadding}}, patches[0].diffs...)
+ patches[0].Start1 -= paddingLength // Should be 0.
+ patches[0].Start2 -= paddingLength // Should be 0.
+ patches[0].Length1 += paddingLength
+ patches[0].Length2 += paddingLength
+ } else if paddingLength > len(patches[0].diffs[0].Text) {
+ // Grow first equality.
+ extraLength := paddingLength - len(patches[0].diffs[0].Text)
+ patches[0].diffs[0].Text = nullPadding[len(patches[0].diffs[0].Text):] + patches[0].diffs[0].Text
+ patches[0].Start1 -= extraLength
+ patches[0].Start2 -= extraLength
+ patches[0].Length1 += extraLength
+ patches[0].Length2 += extraLength
+ }
+
+ // Add some padding on end of last diff.
+ last := len(patches) - 1
+ if len(patches[last].diffs) == 0 || patches[last].diffs[len(patches[last].diffs)-1].Type != DiffEqual {
+ // Add nullPadding equality.
+ patches[last].diffs = append(patches[last].diffs, Diff{DiffEqual, nullPadding})
+ patches[last].Length1 += paddingLength
+ patches[last].Length2 += paddingLength
+ } else if paddingLength > len(patches[last].diffs[len(patches[last].diffs)-1].Text) {
+ // Grow last equality.
+ lastDiff := patches[last].diffs[len(patches[last].diffs)-1]
+ extraLength := paddingLength - len(lastDiff.Text)
+ patches[last].diffs[len(patches[last].diffs)-1].Text += nullPadding[:extraLength]
+ patches[last].Length1 += extraLength
+ patches[last].Length2 += extraLength
+ }
+
+ return nullPadding
+}
+
+// PatchSplitMax looks through the patches and breaks up any which are longer than the maximum limit of the match algorithm.
+// Intended to be called only from within PatchApply.
+func (dmp *DiffMatchPatch) PatchSplitMax(patches []Patch) []Patch {
+ patchSize := dmp.MatchMaxBits
+ for x := 0; x < len(patches); x++ {
+ if patches[x].Length1 <= patchSize {
+ continue
+ }
+ bigpatch := patches[x]
+ // Remove the big old patch.
+ patches = append(patches[:x], patches[x+1:]...)
+ x--
+
+ Start1 := bigpatch.Start1
+ Start2 := bigpatch.Start2
+ precontext := ""
+ for len(bigpatch.diffs) != 0 {
+ // Create one of several smaller patches.
+ patch := Patch{}
+ empty := true
+ patch.Start1 = Start1 - len(precontext)
+ patch.Start2 = Start2 - len(precontext)
+ if len(precontext) != 0 {
+ patch.Length1 = len(precontext)
+ patch.Length2 = len(precontext)
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, precontext})
+ }
+ for len(bigpatch.diffs) != 0 && patch.Length1 < patchSize-dmp.PatchMargin {
+ diffType := bigpatch.diffs[0].Type
+ diffText := bigpatch.diffs[0].Text
+ if diffType == DiffInsert {
+ // Insertions are harmless.
+ patch.Length2 += len(diffText)
+ Start2 += len(diffText)
+ patch.diffs = append(patch.diffs, bigpatch.diffs[0])
+ bigpatch.diffs = bigpatch.diffs[1:]
+ empty = false
+ } else if diffType == DiffDelete && len(patch.diffs) == 1 && patch.diffs[0].Type == DiffEqual && len(diffText) > 2*patchSize {
+ // This is a large deletion. Let it pass in one chunk.
+ patch.Length1 += len(diffText)
+ Start1 += len(diffText)
+ empty = false
+ patch.diffs = append(patch.diffs, Diff{diffType, diffText})
+ bigpatch.diffs = bigpatch.diffs[1:]
+ } else {
+ // Deletion or equality. Only take as much as we can stomach.
+ diffText = diffText[:min(len(diffText), patchSize-patch.Length1-dmp.PatchMargin)]
+
+ patch.Length1 += len(diffText)
+ Start1 += len(diffText)
+ if diffType == DiffEqual {
+ patch.Length2 += len(diffText)
+ Start2 += len(diffText)
+ } else {
+ empty = false
+ }
+ patch.diffs = append(patch.diffs, Diff{diffType, diffText})
+ if diffText == bigpatch.diffs[0].Text {
+ bigpatch.diffs = bigpatch.diffs[1:]
+ } else {
+ bigpatch.diffs[0].Text =
+ bigpatch.diffs[0].Text[len(diffText):]
+ }
+ }
+ }
+ // Compute the head context for the next patch.
+ precontext = dmp.DiffText2(patch.diffs)
+ precontext = precontext[max(0, len(precontext)-dmp.PatchMargin):]
+
+ postcontext := ""
+ // Append the end context for this patch.
+ if len(dmp.DiffText1(bigpatch.diffs)) > dmp.PatchMargin {
+ postcontext = dmp.DiffText1(bigpatch.diffs)[:dmp.PatchMargin]
+ } else {
+ postcontext = dmp.DiffText1(bigpatch.diffs)
+ }
+
+ if len(postcontext) != 0 {
+ patch.Length1 += len(postcontext)
+ patch.Length2 += len(postcontext)
+ if len(patch.diffs) != 0 && patch.diffs[len(patch.diffs)-1].Type == DiffEqual {
+ patch.diffs[len(patch.diffs)-1].Text += postcontext
+ } else {
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, postcontext})
+ }
+ }
+ if !empty {
+ x++
+ patches = append(patches[:x], append([]Patch{patch}, patches[x:]...)...)
+ }
+ }
+ }
+ return patches
+}
+
+// PatchToText takes a list of patches and returns a textual representation.
+func (dmp *DiffMatchPatch) PatchToText(patches []Patch) string {
+ var text bytes.Buffer
+ for _, aPatch := range patches {
+ _, _ = text.WriteString(aPatch.String())
+ }
+ return text.String()
+}
+
+// PatchFromText parses a textual representation of patches and returns a List of Patch objects.
+func (dmp *DiffMatchPatch) PatchFromText(textline string) ([]Patch, error) {
+ patches := []Patch{}
+ if len(textline) == 0 {
+ return patches, nil
+ }
+ text := strings.Split(textline, "\n")
+ textPointer := 0
+ patchHeader := regexp.MustCompile("^@@ -(\\d+),?(\\d*) \\+(\\d+),?(\\d*) @@$")
+
+ var patch Patch
+ var sign uint8
+ var line string
+ for textPointer < len(text) {
+
+ if !patchHeader.MatchString(text[textPointer]) {
+ return patches, errors.New("Invalid patch string: " + text[textPointer])
+ }
+
+ patch = Patch{}
+ m := patchHeader.FindStringSubmatch(text[textPointer])
+
+ patch.Start1, _ = strconv.Atoi(m[1])
+ if len(m[2]) == 0 {
+ patch.Start1--
+ patch.Length1 = 1
+ } else if m[2] == "0" {
+ patch.Length1 = 0
+ } else {
+ patch.Start1--
+ patch.Length1, _ = strconv.Atoi(m[2])
+ }
+
+ patch.Start2, _ = strconv.Atoi(m[3])
+
+ if len(m[4]) == 0 {
+ patch.Start2--
+ patch.Length2 = 1
+ } else if m[4] == "0" {
+ patch.Length2 = 0
+ } else {
+ patch.Start2--
+ patch.Length2, _ = strconv.Atoi(m[4])
+ }
+ textPointer++
+
+ for textPointer < len(text) {
+ if len(text[textPointer]) > 0 {
+ sign = text[textPointer][0]
+ } else {
+ textPointer++
+ continue
+ }
+
+ line = text[textPointer][1:]
+ line = strings.Replace(line, "+", "%2b", -1)
+ line, _ = url.QueryUnescape(line)
+ if sign == '-' {
+ // Deletion.
+ patch.diffs = append(patch.diffs, Diff{DiffDelete, line})
+ } else if sign == '+' {
+ // Insertion.
+ patch.diffs = append(patch.diffs, Diff{DiffInsert, line})
+ } else if sign == ' ' {
+ // Minor equality.
+ patch.diffs = append(patch.diffs, Diff{DiffEqual, line})
+ } else if sign == '@' {
+ // Start of next patch.
+ break
+ } else {
+ // WTF?
+ return patches, errors.New("Invalid patch mode '" + string(sign) + "' in: " + string(line))
+ }
+ textPointer++
+ }
+
+ patches = append(patches, patch)
+ }
+ return patches, nil
+}
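
The patch functions above belong to the vendored copy of sergi/go-diff. As a hedged sketch of how they fit together (importing the public `github.com/sergi/go-diff/diffmatchpatch` module rather than this internal vendored path, which cannot be imported from outside the assertions package), a typical round trip through PatchMake, PatchToText, PatchFromText and PatchApply looks roughly like this:

```go
package main

import (
	"fmt"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func main() {
	dmp := diffmatchpatch.New()

	text1 := "The quick brown fox jumps over the lazy dog."
	text2 := "The quick red fox leaps over the lazy dog."

	// Build patches that describe how to turn text1 into text2.
	patches := dmp.PatchMake(text1, text2)

	// Serialize to the textual "@@ -m,n +m,n @@" format and parse it back,
	// exercising PatchToText and PatchFromText.
	serialized := dmp.PatchToText(patches)
	restored, err := dmp.PatchFromText(serialized)
	if err != nil {
		panic(err)
	}

	// Apply the restored patches to the original text; the bool slice reports,
	// per patch, whether a fuzzy match was found and the patch was applied.
	result, applied := dmp.PatchApply(restored, text1)
	fmt.Println(result)
	fmt.Println(applied) // all true on a clean apply
}
```
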
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/stringutil.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/stringutil.go
new file mode 100644
index 00000000..265f29cc
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch/stringutil.go
@@ -0,0 +1,88 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+ "strings"
+ "unicode/utf8"
+)
+
+// unescaper unescapes selected chars for compatibility with JavaScript's encodeURI.
+// In speed critical applications this could be dropped since the receiving application will certainly decode these fine. Note that this function is case-sensitive. Thus "%3F" would not be unescaped. But this is ok because it is only called with the output of HttpUtility.UrlEncode which returns lowercase hex. Example: "%3f" -> "?", "%24" -> "$", etc.
+var unescaper = strings.NewReplacer(
+ "%21", "!", "%7E", "~", "%27", "'",
+ "%28", "(", "%29", ")", "%3B", ";",
+ "%2F", "/", "%3F", "?", "%3A", ":",
+ "%40", "@", "%26", "&", "%3D", "=",
+ "%2B", "+", "%24", "$", "%2C", ",", "%23", "#", "%2A", "*")
+
+// indexOf returns the first index of pattern in str, starting at str[i].
+func indexOf(str string, pattern string, i int) int {
+ if i > len(str)-1 {
+ return -1
+ }
+ if i <= 0 {
+ return strings.Index(str, pattern)
+ }
+ ind := strings.Index(str[i:], pattern)
+ if ind == -1 {
+ return -1
+ }
+ return ind + i
+}
+
+// lastIndexOf returns the last index of pattern in str, starting at str[i].
+func lastIndexOf(str string, pattern string, i int) int {
+ if i < 0 {
+ return -1
+ }
+ if i >= len(str) {
+ return strings.LastIndex(str, pattern)
+ }
+ _, size := utf8.DecodeRuneInString(str[i:])
+ return strings.LastIndex(str[:i+size], pattern)
+}
+
+// runesIndexOf returns the index of pattern in target, starting at target[i].
+func runesIndexOf(target, pattern []rune, i int) int {
+ if i > len(target)-1 {
+ return -1
+ }
+ if i <= 0 {
+ return runesIndex(target, pattern)
+ }
+ ind := runesIndex(target[i:], pattern)
+ if ind == -1 {
+ return -1
+ }
+ return ind + i
+}
+
+func runesEqual(r1, r2 []rune) bool {
+ if len(r1) != len(r2) {
+ return false
+ }
+ for i, c := range r1 {
+ if c != r2[i] {
+ return false
+ }
+ }
+ return true
+}
+
+// runesIndex is the equivalent of strings.Index for rune slices.
+func runesIndex(r1, r2 []rune) int {
+ last := len(r1) - len(r2)
+ for i := 0; i <= last; i++ {
+ if runesEqual(r1[i:i+len(r2)], r2) {
+ return i
+ }
+ }
+ return -1
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE b/backend/vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE
new file mode 100644
index 00000000..6280ff0e
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE
@@ -0,0 +1,27 @@
+// Copyright (c) 2015 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go
new file mode 100644
index 00000000..313611ef
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go
@@ -0,0 +1,481 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package render
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "sort"
+ "strconv"
+)
+
+var builtinTypeMap = map[reflect.Kind]string{
+ reflect.Bool: "bool",
+ reflect.Complex128: "complex128",
+ reflect.Complex64: "complex64",
+ reflect.Float32: "float32",
+ reflect.Float64: "float64",
+ reflect.Int16: "int16",
+ reflect.Int32: "int32",
+ reflect.Int64: "int64",
+ reflect.Int8: "int8",
+ reflect.Int: "int",
+ reflect.String: "string",
+ reflect.Uint16: "uint16",
+ reflect.Uint32: "uint32",
+ reflect.Uint64: "uint64",
+ reflect.Uint8: "uint8",
+ reflect.Uint: "uint",
+ reflect.Uintptr: "uintptr",
+}
+
+var builtinTypeSet = map[string]struct{}{}
+
+func init() {
+ for _, v := range builtinTypeMap {
+ builtinTypeSet[v] = struct{}{}
+ }
+}
+
+var typeOfString = reflect.TypeOf("")
+var typeOfInt = reflect.TypeOf(int(1))
+var typeOfUint = reflect.TypeOf(uint(1))
+var typeOfFloat = reflect.TypeOf(10.1)
+
+// Render converts a structure to a string representation. Unlike the "%#v"
+// format string, this resolves pointer types' contents in structs, maps, and
+// slices/arrays and prints their field values.
+func Render(v interface{}) string {
+ buf := bytes.Buffer{}
+ s := (*traverseState)(nil)
+ s.render(&buf, 0, reflect.ValueOf(v), false)
+ return buf.String()
+}
+
+// renderPointer is called to render a pointer value.
+//
+// This is overridable so that the test suite can have deterministic pointer
+// values in its expectations.
+var renderPointer = func(buf *bytes.Buffer, p uintptr) {
+ fmt.Fprintf(buf, "0x%016x", p)
+}
+
+// traverseState is used to note and avoid recursion as struct members are being
+// traversed.
+//
+// traverseState is allowed to be nil. Specifically, the root state is nil.
+type traverseState struct {
+ parent *traverseState
+ ptr uintptr
+}
+
+func (s *traverseState) forkFor(ptr uintptr) *traverseState {
+ for cur := s; cur != nil; cur = cur.parent {
+ if ptr == cur.ptr {
+ return nil
+ }
+ }
+
+ fs := &traverseState{
+ parent: s,
+ ptr: ptr,
+ }
+ return fs
+}
+
+func (s *traverseState) render(buf *bytes.Buffer, ptrs int, v reflect.Value, implicit bool) {
+ if v.Kind() == reflect.Invalid {
+ buf.WriteString("nil")
+ return
+ }
+ vt := v.Type()
+
+ // If the type being rendered is a potentially recursive type (a type that
+ // can contain itself as a member), we need to avoid recursion.
+ //
+ // If we've already seen this type before, mark that this is the case and
+ // write a recursion placeholder instead of actually rendering it.
+ //
+ // If we haven't seen it before, fork our `seen` tracking so any higher-up
+ // renderers will also render it at least once, then mark that we've seen it
+ // to avoid recursing on lower layers.
+ pe := uintptr(0)
+ vk := vt.Kind()
+ switch vk {
+ case reflect.Ptr:
+ // Since structs and arrays aren't pointers, they can't directly be
+ // recursed, but they can contain pointers to themselves. Record their
+ // pointer to avoid this.
+ switch v.Elem().Kind() {
+ case reflect.Struct, reflect.Array:
+ pe = v.Pointer()
+ }
+
+ case reflect.Slice, reflect.Map:
+ pe = v.Pointer()
+ }
+ if pe != 0 {
+ s = s.forkFor(pe)
+ if s == nil {
+ buf.WriteString("<REC(")
+ writeType(buf, ptrs, vt)
+ buf.WriteString(")>")
+ return
+ }
+ }
+
+ isAnon := func(t reflect.Type) bool {
+ if t.Name() != "" {
+ if _, ok := builtinTypeSet[t.Name()]; !ok {
+ return false
+ }
+ }
+ return t.Kind() != reflect.Interface
+ }
+
+ switch vk {
+ case reflect.Struct:
+ if !implicit {
+ writeType(buf, ptrs, vt)
+ }
+ buf.WriteRune('{')
+ if rendered, ok := renderTime(v); ok {
+ buf.WriteString(rendered)
+ } else {
+ structAnon := vt.Name() == ""
+ for i := 0; i < vt.NumField(); i++ {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+ anon := structAnon && isAnon(vt.Field(i).Type)
+
+ if !anon {
+ buf.WriteString(vt.Field(i).Name)
+ buf.WriteRune(':')
+ }
+
+ s.render(buf, 0, v.Field(i), anon)
+ }
+ }
+ buf.WriteRune('}')
+
+ case reflect.Slice:
+ if v.IsNil() {
+ if !implicit {
+ writeType(buf, ptrs, vt)
+ buf.WriteString("(nil)")
+ } else {
+ buf.WriteString("nil")
+ }
+ return
+ }
+ fallthrough
+
+ case reflect.Array:
+ if !implicit {
+ writeType(buf, ptrs, vt)
+ }
+ anon := vt.Name() == "" && isAnon(vt.Elem())
+ buf.WriteString("{")
+ for i := 0; i < v.Len(); i++ {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+
+ s.render(buf, 0, v.Index(i), anon)
+ }
+ buf.WriteRune('}')
+
+ case reflect.Map:
+ if !implicit {
+ writeType(buf, ptrs, vt)
+ }
+ if v.IsNil() {
+ buf.WriteString("(nil)")
+ } else {
+ buf.WriteString("{")
+
+ mkeys := v.MapKeys()
+ tryAndSortMapKeys(vt, mkeys)
+
+ kt := vt.Key()
+ keyAnon := typeOfString.ConvertibleTo(kt) || typeOfInt.ConvertibleTo(kt) || typeOfUint.ConvertibleTo(kt) || typeOfFloat.ConvertibleTo(kt)
+ valAnon := vt.Name() == "" && isAnon(vt.Elem())
+ for i, mk := range mkeys {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+
+ s.render(buf, 0, mk, keyAnon)
+ buf.WriteString(":")
+ s.render(buf, 0, v.MapIndex(mk), valAnon)
+ }
+ buf.WriteRune('}')
+ }
+
+ case reflect.Ptr:
+ ptrs++
+ fallthrough
+ case reflect.Interface:
+ if v.IsNil() {
+ writeType(buf, ptrs, v.Type())
+ buf.WriteString("(nil)")
+ } else {
+ s.render(buf, ptrs, v.Elem(), false)
+ }
+
+ case reflect.Chan, reflect.Func, reflect.UnsafePointer:
+ writeType(buf, ptrs, vt)
+ buf.WriteRune('(')
+ renderPointer(buf, v.Pointer())
+ buf.WriteRune(')')
+
+ default:
+ tstr := vt.String()
+ implicit = implicit || (ptrs == 0 && builtinTypeMap[vk] == tstr)
+ if !implicit {
+ writeType(buf, ptrs, vt)
+ buf.WriteRune('(')
+ }
+
+ switch vk {
+ case reflect.String:
+ fmt.Fprintf(buf, "%q", v.String())
+ case reflect.Bool:
+ fmt.Fprintf(buf, "%v", v.Bool())
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ fmt.Fprintf(buf, "%d", v.Int())
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ fmt.Fprintf(buf, "%d", v.Uint())
+
+ case reflect.Float32, reflect.Float64:
+ fmt.Fprintf(buf, "%g", v.Float())
+
+ case reflect.Complex64, reflect.Complex128:
+ fmt.Fprintf(buf, "%g", v.Complex())
+ }
+
+ if !implicit {
+ buf.WriteRune(')')
+ }
+ }
+}
+
+func writeType(buf *bytes.Buffer, ptrs int, t reflect.Type) {
+ parens := ptrs > 0
+ switch t.Kind() {
+ case reflect.Chan, reflect.Func, reflect.UnsafePointer:
+ parens = true
+ }
+
+ if parens {
+ buf.WriteRune('(')
+ for i := 0; i < ptrs; i++ {
+ buf.WriteRune('*')
+ }
+ }
+
+ switch t.Kind() {
+ case reflect.Ptr:
+ if ptrs == 0 {
+ // This pointer was referenced from within writeType (e.g., as part of
+ // rendering a list), and so hasn't had its pointer asterisk accounted
+ // for.
+ buf.WriteRune('*')
+ }
+ writeType(buf, 0, t.Elem())
+
+ case reflect.Interface:
+ if n := t.Name(); n != "" {
+ buf.WriteString(t.String())
+ } else {
+ buf.WriteString("interface{}")
+ }
+
+ case reflect.Array:
+ buf.WriteRune('[')
+ buf.WriteString(strconv.FormatInt(int64(t.Len()), 10))
+ buf.WriteRune(']')
+ writeType(buf, 0, t.Elem())
+
+ case reflect.Slice:
+ if t == reflect.SliceOf(t.Elem()) {
+ buf.WriteString("[]")
+ writeType(buf, 0, t.Elem())
+ } else {
+ // Custom slice type, use type name.
+ buf.WriteString(t.String())
+ }
+
+ case reflect.Map:
+ if t == reflect.MapOf(t.Key(), t.Elem()) {
+ buf.WriteString("map[")
+ writeType(buf, 0, t.Key())
+ buf.WriteRune(']')
+ writeType(buf, 0, t.Elem())
+ } else {
+ // Custom map type, use type name.
+ buf.WriteString(t.String())
+ }
+
+ default:
+ buf.WriteString(t.String())
+ }
+
+ if parens {
+ buf.WriteRune(')')
+ }
+}
+
+type cmpFn func(a, b reflect.Value) int
+
+type sortableValueSlice struct {
+ cmp cmpFn
+ elements []reflect.Value
+}
+
+func (s sortableValueSlice) Len() int {
+ return len(s.elements)
+}
+
+func (s sortableValueSlice) Less(i, j int) bool {
+ return s.cmp(s.elements[i], s.elements[j]) < 0
+}
+
+func (s sortableValueSlice) Swap(i, j int) {
+ s.elements[i], s.elements[j] = s.elements[j], s.elements[i]
+}
+
+// cmpForType returns a cmpFn which sorts the data for some type t in the same
+// order that a go-native map key is compared for equality.
+func cmpForType(t reflect.Type) cmpFn {
+ switch t.Kind() {
+ case reflect.String:
+ return func(av, bv reflect.Value) int {
+ a, b := av.String(), bv.String()
+ if a < b {
+ return -1
+ } else if a > b {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Bool:
+ return func(av, bv reflect.Value) int {
+ a, b := av.Bool(), bv.Bool()
+ if !a && b {
+ return -1
+ } else if a && !b {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return func(av, bv reflect.Value) int {
+ a, b := av.Int(), bv.Int()
+ if a < b {
+ return -1
+ } else if a > b {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
+ reflect.Uint64, reflect.Uintptr, reflect.UnsafePointer:
+ return func(av, bv reflect.Value) int {
+ a, b := av.Uint(), bv.Uint()
+ if a < b {
+ return -1
+ } else if a > b {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Float32, reflect.Float64:
+ return func(av, bv reflect.Value) int {
+ a, b := av.Float(), bv.Float()
+ if a < b {
+ return -1
+ } else if a > b {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Interface:
+ return func(av, bv reflect.Value) int {
+ a, b := av.InterfaceData(), bv.InterfaceData()
+ if a[0] < b[0] {
+ return -1
+ } else if a[0] > b[0] {
+ return 1
+ }
+ if a[1] < b[1] {
+ return -1
+ } else if a[1] > b[1] {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Complex64, reflect.Complex128:
+ return func(av, bv reflect.Value) int {
+ a, b := av.Complex(), bv.Complex()
+ if real(a) < real(b) {
+ return -1
+ } else if real(a) > real(b) {
+ return 1
+ }
+ if imag(a) < imag(b) {
+ return -1
+ } else if imag(a) > imag(b) {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Ptr, reflect.Chan:
+ return func(av, bv reflect.Value) int {
+ a, b := av.Pointer(), bv.Pointer()
+ if a < b {
+ return -1
+ } else if a > b {
+ return 1
+ }
+ return 0
+ }
+
+ case reflect.Struct:
+ cmpLst := make([]cmpFn, t.NumField())
+ for i := range cmpLst {
+ cmpLst[i] = cmpForType(t.Field(i).Type)
+ }
+ return func(a, b reflect.Value) int {
+ for i, cmp := range cmpLst {
+ if rslt := cmp(a.Field(i), b.Field(i)); rslt != 0 {
+ return rslt
+ }
+ }
+ return 0
+ }
+ }
+
+ return nil
+}
+
+func tryAndSortMapKeys(mt reflect.Type, k []reflect.Value) {
+ if cmp := cmpForType(mt.Key()); cmp != nil {
+ sort.Sort(sortableValueSlice{cmp, k})
+ }
+}
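
render.Render is what the assertions failure messages rely on for value formatting. A minimal usage sketch, assuming the package is imported via its upstream home (`github.com/luci/go-render/render`; the internal copy above cannot be imported directly):

```go
package main

import (
	"fmt"

	"github.com/luci/go-render/render" // assumed upstream import path
)

type node struct {
	Name     string
	Children []*node
}

func main() {
	leaf := &node{Name: "leaf"}
	root := &node{Name: "root", Children: []*node{leaf, leaf}}

	// Unlike fmt.Sprintf("%#v", root), Render follows the child pointers and
	// prints the pointed-to struct contents instead of hex addresses, while
	// the traverseState logic guards against infinite recursion.
	fmt.Println(render.Render(root))
	fmt.Printf("%#v\n", root)
}
```
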
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/go-render/render/render_time.go b/backend/vendor/github.com/smartystreets/assertions/internal/go-render/render/render_time.go
new file mode 100644
index 00000000..990c75d0
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/go-render/render/render_time.go
@@ -0,0 +1,26 @@
+package render
+
+import (
+ "reflect"
+ "time"
+)
+
+func renderTime(value reflect.Value) (string, bool) {
+ if instant, ok := convertTime(value); !ok {
+ return "", false
+ } else if instant.IsZero() {
+ return "0", true
+ } else {
+ return instant.String(), true
+ }
+}
+
+func convertTime(value reflect.Value) (t time.Time, ok bool) {
+ if value.Type() == timeType {
+ defer func() { recover() }()
+ t, ok = value.Interface().(time.Time)
+ }
+ return
+}
+
+var timeType = reflect.TypeOf(time.Time{})
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/.gitignore b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/.gitignore
new file mode 100644
index 00000000..dd8fc746
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/.gitignore
@@ -0,0 +1,5 @@
+*.6
+6.out
+_obj/
+_test/
+_testmain.go
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml
new file mode 100644
index 00000000..b9721192
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml
@@ -0,0 +1,4 @@
+# Cf. http://docs.travis-ci.com/user/getting-started/
+# Cf. http://docs.travis-ci.com/user/languages/go/
+
+language: go
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/README.md b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/README.md
new file mode 100644
index 00000000..215a2bb7
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/README.md
@@ -0,0 +1,58 @@
+[![GoDoc](https://godoc.org/github.com/smartystreets/assertions/internal/oglematchers?status.svg)](https://godoc.org/github.com/smartystreets/assertions/internal/oglematchers)
+
+`oglematchers` is a package for the Go programming language containing a set of
+matchers, useful in a testing or mocking framework, inspired by and mostly
+compatible with [Google Test][googletest] for C++ and
+[Google JS Test][google-js-test]. The package is used by the
+[ogletest][ogletest] testing framework and [oglemock][oglemock] mocking
+framework, which may be more directly useful to you, but can be generically used
+elsewhere as well.
+
+A "matcher" is simply an object with a `Matches` method defining a set of golang
+values matched by the matcher, and a `Description` method describing that set.
+For example, here are some matchers:
+
+```go
+// Numbers
+Equals(17.13)
+LessThan(19)
+
+// Strings
+Equals("taco")
+HasSubstr("burrito")
+MatchesRegex("t.*o")
+
+// Combining matchers
+AnyOf(LessThan(17), GreaterThan(19))
+```
+
+There are lots more; see [here][reference] for a reference. You can also add
+your own simply by implementing the `oglematchers.Matcher` interface.
+
+
+Installation
+------------
+
+First, make sure you have installed Go 1.0.2 or newer. See
+[here][golang-install] for instructions.
+
+Use the following command to install `oglematchers` and keep it up to date:
+
+ go get -u github.com/smartystreets/assertions/internal/oglematchers
+
+
+Documentation
+-------------
+
+See [here][reference] for documentation. Alternatively, you can install the
+package and then use `godoc`:
+
+ godoc github.com/smartystreets/assertions/internal/oglematchers
+
+
+[reference]: http://godoc.org/github.com/smartystreets/assertions/internal/oglematchers
+[golang-install]: http://golang.org/doc/install.html
+[googletest]: http://code.google.com/p/googletest/
+[google-js-test]: http://code.google.com/p/google-js-test/
+[ogletest]: http://github.com/smartystreets/assertions/internal/ogletest
+[oglemock]: http://github.com/smartystreets/assertions/internal/oglemock
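
As the README says, a custom matcher only has to provide the two methods of the Matcher interface used throughout the files that follow (Matches and Description). A hedged illustration, written against the original `github.com/jacobsa/oglematchers` import path because the internal copy vendored here is not importable; the even-number check is purely hypothetical:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/jacobsa/oglematchers" // assumed upstream import path
)

// isEven is an illustrative custom matcher that matches even int values.
type isEven struct{}

func (isEven) Description() string { return "is an even int" }

func (isEven) Matches(candidate interface{}) error {
	n, ok := candidate.(int)
	if !ok {
		// A *FatalError marks the mismatch as fatal; combinators such as
		// AnyOf report it when no alternative matches.
		return oglematchers.NewFatalError("which is not an int")
	}
	if n%2 != 0 {
		return errors.New("which is odd")
	}
	return nil
}

func main() {
	var m oglematchers.Matcher = isEven{}
	fmt.Println(m.Matches(4))      // <nil>
	fmt.Println(m.Matches(5))      // which is odd
	fmt.Println(m.Matches("taco")) // which is not an int
}
```
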
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go
new file mode 100644
index 00000000..2918b51f
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go
@@ -0,0 +1,94 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// AnyOf accepts a set of values S and returns a matcher that follows the
+// algorithm below when considering a candidate c:
+//
+// 1. If there exists a value m in S such that m implements the Matcher
+// interface and m matches c, return true.
+//
+// 2. Otherwise, if there exists a value v in S such that v does not implement
+// the Matcher interface and the matcher Equals(v) matches c, return true.
+//
+// 3. Otherwise, if there is a value m in S such that m implements the Matcher
+// interface and m returns a fatal error for c, return that fatal error.
+//
+// 4. Otherwise, return false.
+//
+// This is akin to a logical OR operation for matchers, with non-matchers x
+// being treated as Equals(x).
+func AnyOf(vals ...interface{}) Matcher {
+ // Get ahold of a type variable for the Matcher interface.
+ var dummy *Matcher
+ matcherType := reflect.TypeOf(dummy).Elem()
+
+ // Create a matcher for each value, or use the value itself if it's already a
+ // matcher.
+ wrapped := make([]Matcher, len(vals))
+ for i, v := range vals {
+ t := reflect.TypeOf(v)
+ if t != nil && t.Implements(matcherType) {
+ wrapped[i] = v.(Matcher)
+ } else {
+ wrapped[i] = Equals(v)
+ }
+ }
+
+ return &anyOfMatcher{wrapped}
+}
+
+type anyOfMatcher struct {
+ wrapped []Matcher
+}
+
+func (m *anyOfMatcher) Description() string {
+ wrappedDescs := make([]string, len(m.wrapped))
+ for i, matcher := range m.wrapped {
+ wrappedDescs[i] = matcher.Description()
+ }
+
+ return fmt.Sprintf("or(%s)", strings.Join(wrappedDescs, ", "))
+}
+
+func (m *anyOfMatcher) Matches(c interface{}) (err error) {
+ err = errors.New("")
+
+ // Try each matcher in turn.
+ for _, matcher := range m.wrapped {
+ wrappedErr := matcher.Matches(c)
+
+ // Return immediately if there's a match.
+ if wrappedErr == nil {
+ err = nil
+ return
+ }
+
+ // Note the fatal error, if any.
+ if _, isFatal := wrappedErr.(*FatalError); isFatal {
+ err = wrappedErr
+ }
+ }
+
+ return
+}
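
A short sketch of the wrapping rule described in the AnyOf doc comment: bare values become Equals matchers, so plain values and matchers can be mixed freely (same import-path assumption as above):

```go
package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	// "taco" is not a Matcher, so AnyOf treats it as Equals("taco") (rule 2).
	m := oglematchers.AnyOf(oglematchers.HasSubstr("burrito"), "taco")

	fmt.Println(m.Description())               // e.g. or(has substring "burrito", taco)
	fmt.Println(m.Matches("taco"))             // <nil>: matched via the Equals branch
	fmt.Println(m.Matches("enchilada") != nil) // true: neither alternative matches
}
```
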
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go
new file mode 100644
index 00000000..87f107d3
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go
@@ -0,0 +1,61 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Contains returns a matcher that matches arrays and slices with at least one element that
+// matches the supplied argument. If the argument x is not itself a Matcher,
+// this is equivalent to Contains(Equals(x)).
+func Contains(x interface{}) Matcher {
+ var result containsMatcher
+ var ok bool
+
+ if result.elementMatcher, ok = x.(Matcher); !ok {
+ result.elementMatcher = DeepEquals(x)
+ }
+
+ return &result
+}
+
+type containsMatcher struct {
+ elementMatcher Matcher
+}
+
+func (m *containsMatcher) Description() string {
+ return fmt.Sprintf("contains: %s", m.elementMatcher.Description())
+}
+
+func (m *containsMatcher) Matches(candidate interface{}) error {
+ // The candidate must be a slice or an array.
+ v := reflect.ValueOf(candidate)
+ if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
+ return NewFatalError("which is not a slice or array")
+ }
+
+ // Check each element.
+ for i := 0; i < v.Len(); i++ {
+ elem := v.Index(i)
+ if matchErr := m.elementMatcher.Matches(elem.Interface()); matchErr == nil {
+ return nil
+ }
+ }
+
+ return fmt.Errorf("")
+}
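
Contains accepts either a Matcher or a plain value (wrapped in DeepEquals), and it only understands slices and arrays. A hedged usage sketch under the same assumptions:

```go
package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	containsTwo := oglematchers.Contains(2)                            // plain value, wrapped as DeepEquals(2)
	containsBig := oglematchers.Contains(oglematchers.GreaterThan(10)) // explicit element matcher

	fmt.Println(containsTwo.Matches([]int{1, 2, 3}))        // <nil>: 2 is present
	fmt.Println(containsBig.Matches([]int{1, 2, 3}) != nil) // true: no element exceeds 10
	fmt.Println(containsBig.Matches("not a slice"))         // which is not a slice or array
}
```
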
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go
new file mode 100644
index 00000000..1d91baef
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go
@@ -0,0 +1,88 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+var byteSliceType reflect.Type = reflect.TypeOf([]byte{})
+
+// DeepEquals returns a matcher that matches based on 'deep equality', as
+// defined by the reflect package. This matcher requires that values have
+// identical types to x.
+func DeepEquals(x interface{}) Matcher {
+ return &deepEqualsMatcher{x}
+}
+
+type deepEqualsMatcher struct {
+ x interface{}
+}
+
+func (m *deepEqualsMatcher) Description() string {
+ xDesc := fmt.Sprintf("%v", m.x)
+ xValue := reflect.ValueOf(m.x)
+
+ // Special case: fmt.Sprintf presents nil slices as "[]", but
+ // reflect.DeepEqual makes a distinction between nil and empty slices. Make
+ // this less confusing.
+ if xValue.Kind() == reflect.Slice && xValue.IsNil() {
+ xDesc = "<nil slice>"
+ }
+
+ return fmt.Sprintf("deep equals: %s", xDesc)
+}
+
+func (m *deepEqualsMatcher) Matches(c interface{}) error {
+ // Make sure the types match.
+ ct := reflect.TypeOf(c)
+ xt := reflect.TypeOf(m.x)
+
+ if ct != xt {
+ return NewFatalError(fmt.Sprintf("which is of type %v", ct))
+ }
+
+ // Special case: handle byte slices more efficiently.
+ cValue := reflect.ValueOf(c)
+ xValue := reflect.ValueOf(m.x)
+
+ if ct == byteSliceType && !cValue.IsNil() && !xValue.IsNil() {
+ xBytes := m.x.([]byte)
+ cBytes := c.([]byte)
+
+ if bytes.Equal(cBytes, xBytes) {
+ return nil
+ }
+
+ return errors.New("")
+ }
+
+ // Defer to the reflect package.
+ if reflect.DeepEqual(m.x, c) {
+ return nil
+ }
+
+ // Special case: if the comparison failed because c is the nil slice, give
+ // an indication of this (since its value is printed as "[]").
+ if cValue.Kind() == reflect.Slice && cValue.IsNil() {
+ return errors.New("which is nil")
+ }
+
+ return errors.New("")
+}
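
Unlike Equals, DeepEquals insists on identical types and falls back to reflect.DeepEqual for composite values. A brief hedged sketch:

```go
package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	m := oglematchers.DeepEquals([]int{1, 2, 3})

	fmt.Println(m.Matches([]int{1, 2, 3}))   // <nil>: same type, same contents
	fmt.Println(m.Matches([]int64{1, 2, 3})) // fatal: which is of type []int64
	fmt.Println(m.Matches([]int(nil)))       // which is nil
}
```
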
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go
new file mode 100644
index 00000000..a510707b
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go
@@ -0,0 +1,541 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+// Equals(x) returns a matcher that matches values v such that v and x are
+// equivalent. This includes the case when the comparison v == x using Go's
+// built-in comparison operator is legal (except for structs, which this
+// matcher does not support), but for convenience the following rules also
+// apply:
+//
+// * Type checking is done based on underlying types rather than actual
+// types, so that e.g. two aliases for string can be compared:
+//
+// type stringAlias1 string
+// type stringAlias2 string
+//
+// a := "taco"
+// b := stringAlias1("taco")
+// c := stringAlias2("taco")
+//
+// ExpectTrue(a == b) // Legal, passes
+// ExpectTrue(b == c) // Illegal, doesn't compile
+//
+// ExpectThat(a, Equals(b)) // Passes
+// ExpectThat(b, Equals(c)) // Passes
+//
+// * Values of numeric type are treated as if they were abstract numbers, and
+// compared accordingly. Therefore Equals(17) will match int(17),
+// int16(17), uint(17), float32(17), complex64(17), and so on.
+//
+// If you want a stricter matcher that contains no such cleverness, see
+// IdenticalTo instead.
+//
+// Arrays are supported by this matcher, but do not participate in the
+// exceptions above. Two arrays compared with this matcher must have identical
+// types, and their element type must itself be comparable according to Go's ==
+// operator.
+func Equals(x interface{}) Matcher {
+ v := reflect.ValueOf(x)
+
+ // This matcher doesn't support structs.
+ if v.Kind() == reflect.Struct {
+ panic(fmt.Sprintf("oglematchers.Equals: unsupported kind %v", v.Kind()))
+ }
+
+ // The == operator is not defined for non-nil slices.
+ if v.Kind() == reflect.Slice && v.Pointer() != uintptr(0) {
+ panic("oglematchers.Equals: non-nil slice")
+ }
+
+ return &equalsMatcher{v}
+}
+
+type equalsMatcher struct {
+ expectedValue reflect.Value
+}
+
+////////////////////////////////////////////////////////////////////////
+// Numeric types
+////////////////////////////////////////////////////////////////////////
+
+func isSignedInteger(v reflect.Value) bool {
+ k := v.Kind()
+ return k >= reflect.Int && k <= reflect.Int64
+}
+
+func isUnsignedInteger(v reflect.Value) bool {
+ k := v.Kind()
+ return k >= reflect.Uint && k <= reflect.Uintptr
+}
+
+func isInteger(v reflect.Value) bool {
+ return isSignedInteger(v) || isUnsignedInteger(v)
+}
+
+func isFloat(v reflect.Value) bool {
+ k := v.Kind()
+ return k == reflect.Float32 || k == reflect.Float64
+}
+
+func isComplex(v reflect.Value) bool {
+ k := v.Kind()
+ return k == reflect.Complex64 || k == reflect.Complex128
+}
+
+func checkAgainstInt64(e int64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ if c.Int() == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ u := c.Uint()
+ if u <= math.MaxInt64 && int64(u) == e {
+ err = nil
+ }
+
+ // Turn around the various floating point types so that the checkAgainst*
+ // functions for them can deal with precision issues.
+ case isFloat(c), isComplex(c):
+ return Equals(c.Interface()).Matches(e)
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstUint64(e uint64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ i := c.Int()
+ if i >= 0 && uint64(i) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if c.Uint() == e {
+ err = nil
+ }
+
+ // Turn around the various floating point types so that the checkAgainst*
+ // functions for them can deal with precision issues.
+ case isFloat(c), isComplex(c):
+ return Equals(c.Interface()).Matches(e)
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstFloat32(e float32, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ if float32(c.Int()) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if float32(c.Uint()) == e {
+ err = nil
+ }
+
+ case isFloat(c):
+ // Compare using float32 to avoid a false sense of precision; otherwise
+ // e.g. Equals(float32(0.1)) won't match float32(0.1).
+ if float32(c.Float()) == e {
+ err = nil
+ }
+
+ case isComplex(c):
+ comp := c.Complex()
+ rl := real(comp)
+ im := imag(comp)
+
+ // Compare using float32 to avoid a false sense of precision; otherwise
+ // e.g. Equals(float32(0.1)) won't match (0.1 + 0i).
+ if im == 0 && float32(rl) == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstFloat64(e float64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ ck := c.Kind()
+
+ switch {
+ case isSignedInteger(c):
+ if float64(c.Int()) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if float64(c.Uint()) == e {
+ err = nil
+ }
+
+ // If the actual value is lower precision, turn the comparison around so we
+ // apply the low-precision rules. Otherwise, e.g. Equals(0.1) may not match
+ // float32(0.1).
+ case ck == reflect.Float32 || ck == reflect.Complex64:
+ return Equals(c.Interface()).Matches(e)
+
+ // Otherwise, compare with double precision.
+ case isFloat(c):
+ if c.Float() == e {
+ err = nil
+ }
+
+ case isComplex(c):
+ comp := c.Complex()
+ rl := real(comp)
+ im := imag(comp)
+
+ if im == 0 && rl == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstComplex64(e complex64, c reflect.Value) (err error) {
+ err = errors.New("")
+ realPart := real(e)
+ imaginaryPart := imag(e)
+
+ switch {
+ case isInteger(c) || isFloat(c):
+ // If we have no imaginary part, then we should just compare against the
+ // real part. Otherwise, we can't be equal.
+ if imaginaryPart != 0 {
+ return
+ }
+
+ return checkAgainstFloat32(realPart, c)
+
+ case isComplex(c):
+ // Compare using complex64 to avoid a false sense of precision; otherwise
+ // e.g. Equals(0.1 + 0i) won't match float32(0.1).
+ if complex64(c.Complex()) == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstComplex128(e complex128, c reflect.Value) (err error) {
+ err = errors.New("")
+ realPart := real(e)
+ imaginaryPart := imag(e)
+
+ switch {
+ case isInteger(c) || isFloat(c):
+ // If we have no imaginary part, then we should just compare against the
+ // real part. Otherwise, we can't be equal.
+ if imaginaryPart != 0 {
+ return
+ }
+
+ return checkAgainstFloat64(realPart, c)
+
+ case isComplex(c):
+ if c.Complex() == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+////////////////////////////////////////////////////////////////////////
+// Other types
+////////////////////////////////////////////////////////////////////////
+
+func checkAgainstBool(e bool, c reflect.Value) (err error) {
+ if c.Kind() != reflect.Bool {
+ err = NewFatalError("which is not a bool")
+ return
+ }
+
+ err = errors.New("")
+ if c.Bool() == e {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstChan(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "chan int".
+ typeStr := fmt.Sprintf("%s %s", e.Type().ChanDir(), e.Type().Elem())
+
+ // Make sure c is a chan of the correct type.
+ if c.Kind() != reflect.Chan ||
+ c.Type().ChanDir() != e.Type().ChanDir() ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstFunc(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a function.
+ if c.Kind() != reflect.Func {
+ err = NewFatalError("which is not a function")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstMap(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a map.
+ if c.Kind() != reflect.Map {
+ err = NewFatalError("which is not a map")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstPtr(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "*int".
+ typeStr := fmt.Sprintf("*%v", e.Type().Elem())
+
+ // Make sure c is a pointer of the correct type.
+ if c.Kind() != reflect.Ptr ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstSlice(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "[]int".
+ typeStr := fmt.Sprintf("[]%v", e.Type().Elem())
+
+ // Make sure c is a slice of the correct type.
+ if c.Kind() != reflect.Slice ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstString(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a string.
+ if c.Kind() != reflect.String {
+ err = NewFatalError("which is not a string")
+ return
+ }
+
+ err = errors.New("")
+ if c.String() == e.String() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstArray(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "[2]int".
+ typeStr := fmt.Sprintf("%v", e.Type())
+
+ // Make sure c is the correct type.
+ if c.Type() != e.Type() {
+ err = NewFatalError(fmt.Sprintf("which is not %s", typeStr))
+ return
+ }
+
+ // Check for equality.
+ if e.Interface() != c.Interface() {
+ err = errors.New("")
+ return
+ }
+
+ return
+}
+
+func checkAgainstUnsafePointer(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a pointer.
+ if c.Kind() != reflect.UnsafePointer {
+		err = NewFatalError("which is not an unsafe.Pointer")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkForNil(c reflect.Value) (err error) {
+ err = errors.New("")
+
+ // Make sure it is legal to call IsNil.
+ switch c.Kind() {
+ case reflect.Invalid:
+ case reflect.Chan:
+ case reflect.Func:
+ case reflect.Interface:
+ case reflect.Map:
+ case reflect.Ptr:
+ case reflect.Slice:
+
+ default:
+ err = NewFatalError("which cannot be compared to nil")
+ return
+ }
+
+ // Ask whether the value is nil. Handle a nil literal (kind Invalid)
+ // specially, since it's not legal to call IsNil there.
+ if c.Kind() == reflect.Invalid || c.IsNil() {
+ err = nil
+ }
+ return
+}
+
+////////////////////////////////////////////////////////////////////////
+// Public implementation
+////////////////////////////////////////////////////////////////////////
+
+func (m *equalsMatcher) Matches(candidate interface{}) error {
+ e := m.expectedValue
+ c := reflect.ValueOf(candidate)
+ ek := e.Kind()
+
+ switch {
+ case ek == reflect.Bool:
+ return checkAgainstBool(e.Bool(), c)
+
+ case isSignedInteger(e):
+ return checkAgainstInt64(e.Int(), c)
+
+ case isUnsignedInteger(e):
+ return checkAgainstUint64(e.Uint(), c)
+
+ case ek == reflect.Float32:
+ return checkAgainstFloat32(float32(e.Float()), c)
+
+ case ek == reflect.Float64:
+ return checkAgainstFloat64(e.Float(), c)
+
+ case ek == reflect.Complex64:
+ return checkAgainstComplex64(complex64(e.Complex()), c)
+
+ case ek == reflect.Complex128:
+ return checkAgainstComplex128(complex128(e.Complex()), c)
+
+ case ek == reflect.Chan:
+ return checkAgainstChan(e, c)
+
+ case ek == reflect.Func:
+ return checkAgainstFunc(e, c)
+
+ case ek == reflect.Map:
+ return checkAgainstMap(e, c)
+
+ case ek == reflect.Ptr:
+ return checkAgainstPtr(e, c)
+
+ case ek == reflect.Slice:
+ return checkAgainstSlice(e, c)
+
+ case ek == reflect.String:
+ return checkAgainstString(e, c)
+
+ case ek == reflect.Array:
+ return checkAgainstArray(e, c)
+
+ case ek == reflect.UnsafePointer:
+ return checkAgainstUnsafePointer(e, c)
+
+ case ek == reflect.Invalid:
+ return checkForNil(c)
+ }
+
+ panic(fmt.Sprintf("equalsMatcher.Matches: unexpected kind: %v", ek))
+}
+
+func (m *equalsMatcher) Description() string {
+ // Special case: handle nil.
+ if !m.expectedValue.IsValid() {
+ return "is nil"
+ }
+
+ return fmt.Sprintf("%v", m.expectedValue.Interface())
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go
new file mode 100644
index 00000000..4b9d103a
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go
@@ -0,0 +1,39 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// GreaterOrEqual returns a matcher that matches integer, floating point, or
+// strings values v such that v >= x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// GreaterOrEqual will panic.
+func GreaterOrEqual(x interface{}) Matcher {
+ desc := fmt.Sprintf("greater than or equal to %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("greater than or equal to \"%s\"", x)
+ }
+
+ return transformDescription(Not(LessThan(x)), desc)
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go
new file mode 100644
index 00000000..3eef3217
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go
@@ -0,0 +1,39 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// GreaterThan returns a matcher that matches integer, floating point, or
+// strings values v such that v > x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// GreaterThan will panic.
+func GreaterThan(x interface{}) Matcher {
+ desc := fmt.Sprintf("greater than %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("greater than \"%s\"", x)
+ }
+
+ return transformDescription(Not(LessOrEqual(x)), desc)
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go
new file mode 100644
index 00000000..8402cdea
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go
@@ -0,0 +1,41 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// LessOrEqual returns a matcher that matches integer, floating point, or
+// strings values v such that v <= x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// LessOrEqual will panic.
+func LessOrEqual(x interface{}) Matcher {
+ desc := fmt.Sprintf("less than or equal to %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("less than or equal to \"%s\"", x)
+ }
+
+ // Put LessThan last so that its error messages will be used in the event of
+ // failure.
+ return transformDescription(AnyOf(Equals(x), LessThan(x)), desc)
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go
new file mode 100644
index 00000000..8258e45d
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go
@@ -0,0 +1,152 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+// LessThan returns a matcher that matches integer, floating point, or strings
+// values v such that v < x. Comparison is not defined between numeric and
+// string types, but is defined between all integer and floating point types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// LessThan will panic.
+func LessThan(x interface{}) Matcher {
+ v := reflect.ValueOf(x)
+ kind := v.Kind()
+
+ switch {
+ case isInteger(v):
+ case isFloat(v):
+ case kind == reflect.String:
+
+ default:
+ panic(fmt.Sprintf("LessThan: unexpected kind %v", kind))
+ }
+
+ return &lessThanMatcher{v}
+}
+
+type lessThanMatcher struct {
+ limit reflect.Value
+}
+
+func (m *lessThanMatcher) Description() string {
+ // Special case: make it clear that strings are strings.
+ if m.limit.Kind() == reflect.String {
+ return fmt.Sprintf("less than \"%s\"", m.limit.String())
+ }
+
+ return fmt.Sprintf("less than %v", m.limit.Interface())
+}
+
+func compareIntegers(v1, v2 reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(v1) && isSignedInteger(v2):
+ if v1.Int() < v2.Int() {
+ err = nil
+ }
+ return
+
+ case isSignedInteger(v1) && isUnsignedInteger(v2):
+ if v1.Int() < 0 || uint64(v1.Int()) < v2.Uint() {
+ err = nil
+ }
+ return
+
+ case isUnsignedInteger(v1) && isSignedInteger(v2):
+ if v1.Uint() <= math.MaxInt64 && int64(v1.Uint()) < v2.Int() {
+ err = nil
+ }
+ return
+
+ case isUnsignedInteger(v1) && isUnsignedInteger(v2):
+ if v1.Uint() < v2.Uint() {
+ err = nil
+ }
+ return
+ }
+
+ panic(fmt.Sprintf("compareIntegers: %v %v", v1, v2))
+}
+
+func getFloat(v reflect.Value) float64 {
+ switch {
+ case isSignedInteger(v):
+ return float64(v.Int())
+
+ case isUnsignedInteger(v):
+ return float64(v.Uint())
+
+ case isFloat(v):
+ return v.Float()
+ }
+
+ panic(fmt.Sprintf("getFloat: %v", v))
+}
+
+func (m *lessThanMatcher) Matches(c interface{}) (err error) {
+ v1 := reflect.ValueOf(c)
+ v2 := m.limit
+
+ err = errors.New("")
+
+ // Handle strings as a special case.
+ if v1.Kind() == reflect.String && v2.Kind() == reflect.String {
+ if v1.String() < v2.String() {
+ err = nil
+ }
+ return
+ }
+
+ // If we get here, we require that we are dealing with integers or floats.
+ v1Legal := isInteger(v1) || isFloat(v1)
+ v2Legal := isInteger(v2) || isFloat(v2)
+ if !v1Legal || !v2Legal {
+ err = NewFatalError("which is not comparable")
+ return
+ }
+
+ // Handle the various comparison cases.
+ switch {
+ // Both integers
+ case isInteger(v1) && isInteger(v2):
+ return compareIntegers(v1, v2)
+
+ // At least one float32
+ case v1.Kind() == reflect.Float32 || v2.Kind() == reflect.Float32:
+ if float32(getFloat(v1)) < float32(getFloat(v2)) {
+ err = nil
+ }
+ return
+
+ // At least one float64
+ case v1.Kind() == reflect.Float64 || v2.Kind() == reflect.Float64:
+ if getFloat(v1) < getFloat(v2) {
+ err = nil
+ }
+ return
+ }
+
+ // We shouldn't get here.
+ panic(fmt.Sprintf("lessThanMatcher.Matches: Shouldn't get here: %v %v", v1, v2))
+}
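compareIntegers and getFloat above are careful about mixed signedness and mixed precision. A standalone sketch (local helper names, not the vendored functions) of the two mixed-sign branches and why the MaxInt64 guard matters:

package main

import (
	"fmt"
	"math"
)

// Mirrors the signed-vs-unsigned branch: any negative signed value is less
// than any unsigned value; otherwise compare in uint64 space.
func signedLessThanUnsigned(a int64, b uint64) bool {
	return a < 0 || uint64(a) < b
}

// Mirrors the unsigned-vs-signed branch: values above MaxInt64 cannot be
// less than any int64, and the guard prevents an overflowing conversion.
func unsignedLessThanSigned(a uint64, b int64) bool {
	return a <= math.MaxInt64 && int64(a) < b
}

func main() {
	fmt.Println(signedLessThanUnsigned(-1, 0))              // true
	fmt.Println(unsignedLessThanSigned(math.MaxUint64, 10)) // false, no overflow
}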
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go
new file mode 100644
index 00000000..78159a07
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go
@@ -0,0 +1,86 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package oglematchers provides a set of matchers useful in a testing or
+// mocking framework. These matchers are inspired by and mostly compatible with
+// Google Test for C++ and Google JS Test.
+//
+// This package is used by github.com/smartystreets/assertions/internal/ogletest and
+// github.com/smartystreets/assertions/internal/oglemock, which may be more directly useful if you're not
+// writing your own testing package or defining your own matchers.
+package oglematchers
+
+// A Matcher is some predicate implicitly defining a set of values that it
+// matches. For example, GreaterThan(17) matches all numeric values greater
+// than 17, and HasSubstr("taco") matches all strings with the substring
+// "taco".
+//
+// Matchers are typically exposed to tests via constructor functions like
+// HasSubstr. In order to implement such a function you can either define your
+// own matcher type or use NewMatcher.
+type Matcher interface {
+	// Check whether the supplied value belongs to the set defined by the
+ // matcher. Return a non-nil error if and only if it does not.
+ //
+ // The error describes why the value doesn't match. The error text is a
+ // relative clause that is suitable for being placed after the value. For
+ // example, a predicate that matches strings with a particular substring may,
+ // when presented with a numerical value, return the following error text:
+ //
+ // "which is not a string"
+ //
+ // Then the failure message may look like:
+ //
+ // Expected: has substring "taco"
+ // Actual: 17, which is not a string
+ //
+ // If the error is self-apparent based on the description of the matcher, the
+ // error text may be empty (but the error still non-nil). For example:
+ //
+ // Expected: 17
+ // Actual: 19
+ //
+ // If you are implementing a new matcher, see also the documentation on
+ // FatalError.
+ Matches(candidate interface{}) error
+
+ // Description returns a string describing the property that values matching
+ // this matcher have, as a verb phrase where the subject is the value. For
+	// example, "is greater than 17" or "has substring "taco"".
+ Description() string
+}
+
+// FatalError is an implementation of the error interface that may be returned
+// from matchers, indicating the error should be propagated. Returning a
+// *FatalError indicates that the matcher doesn't process values of the
+// supplied type, or otherwise doesn't know how to handle the value.
+//
+// For example, if GreaterThan(17) returned false for the value "taco" without
+// a fatal error, then Not(GreaterThan(17)) would return true. This is
+// technically correct, but is surprising and may mask failures where the wrong
+// sort of matcher is accidentally used. Instead, GreaterThan(17) can return a
+// fatal error, which will be propagated by Not().
+type FatalError struct {
+ errorText string
+}
+
+// NewFatalError creates a FatalError struct with the supplied error text.
+func NewFatalError(s string) *FatalError {
+ return &FatalError{s}
+}
+
+func (e *FatalError) Error() string {
+ return e.errorText
+}
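The Matcher contract above phrases error text as a relative clause so a reporter can append it to the actual value. A small sketch of how that composes; report is a hypothetical local helper, not part of the library:

package main

import (
	"errors"
	"fmt"
)

// report splices a matcher's relative-clause error text after the actual
// value, the way the doc comment above describes.
func report(description string, actual interface{}, matchErr error) string {
	msg := fmt.Sprintf("Expected: %s\nActual:   %v", description, actual)
	if matchErr != nil && matchErr.Error() != "" {
		msg += ", " + matchErr.Error()
	}
	return msg
}

func main() {
	// A "has substring" matcher handed a number would return an error whose
	// text reads "which is not a string".
	fmt.Println(report(`has substring "taco"`, 17, errors.New("which is not a string")))
}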
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go
new file mode 100644
index 00000000..623789fe
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go
@@ -0,0 +1,53 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+)
+
+// Not returns a matcher that inverts the set of values matched by the wrapped
+// matcher. It does not transform the result for values for which the wrapped
+// matcher returns a fatal error.
+func Not(m Matcher) Matcher {
+	return &notMatcher{m}
+}
+
+type notMatcher struct {
+ wrapped Matcher
+}
+
+func (m *notMatcher) Matches(c interface{}) (err error) {
+ err = m.wrapped.Matches(c)
+
+ // Did the wrapped matcher say yes?
+ if err == nil {
+ return errors.New("")
+ }
+
+ // Did the wrapped matcher return a fatal error?
+ if _, isFatal := err.(*FatalError); isFatal {
+ return err
+ }
+
+ // The wrapped matcher returned a non-fatal error.
+ return nil
+}
+
+func (m *notMatcher) Description() string {
+ return fmt.Sprintf("not(%s)", m.wrapped.Description())
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go
new file mode 100644
index 00000000..8ea2807c
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go
@@ -0,0 +1,36 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// transformDescription returns a matcher that is equivalent to the supplied
+// one, except that it has the supplied description instead of the one attached
+// to the existing matcher.
+func transformDescription(m Matcher, newDesc string) Matcher {
+ return &transformDescriptionMatcher{newDesc, m}
+}
+
+type transformDescriptionMatcher struct {
+ desc string
+ wrappedMatcher Matcher
+}
+
+func (m *transformDescriptionMatcher) Description() string {
+ return m.desc
+}
+
+func (m *transformDescriptionMatcher) Matches(c interface{}) error {
+ return m.wrappedMatcher.Matches(c)
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/messages.go b/backend/vendor/github.com/smartystreets/assertions/messages.go
new file mode 100644
index 00000000..72782b00
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/messages.go
@@ -0,0 +1,106 @@
+package assertions
+
+const (
+ shouldHaveBeenEqual = "Expected: '%v'\nActual: '%v'\n(Should be equal)"
+ shouldHaveBeenEqualNoResemblance = "Both the actual and expected values render equally ('%s') and their types are the same. Try using ShouldResemble instead."
+ shouldNotHaveBeenEqual = "Expected '%v'\nto NOT equal '%v'\n(but it did)!"
+ shouldHaveBeenEqualTypeMismatch = "Expected: '%v' (%T)\nActual: '%v' (%T)\n(Should be equal, type mismatch)"
+
+ shouldHaveBeenAlmostEqual = "Expected '%v' to almost equal '%v' (but it didn't)!"
+ shouldHaveNotBeenAlmostEqual = "Expected '%v' to NOT almost equal '%v' (but it did)!"
+
+ shouldHaveResembled = "Expected: '%s'\nActual: '%s'\n(Should resemble)!"
+ shouldNotHaveResembled = "Expected '%#v'\nto NOT resemble '%#v'\n(but it did)!"
+
+ shouldBePointers = "Both arguments should be pointers "
+ shouldHaveBeenNonNilPointer = shouldBePointers + "(the %s was %s)!"
+	shouldHavePointedTo = "Expected '%+v' (address: '%v') and '%+v' (address: '%v') to be the same address (but they weren't)!"
+ shouldNotHavePointedTo = "Expected '%+v' and '%+v' to be different references (but they matched: '%v')!"
+
+ shouldHaveBeenNil = "Expected: nil\nActual: '%v'"
+ shouldNotHaveBeenNil = "Expected '%+v' to NOT be nil (but it was)!"
+
+ shouldHaveBeenTrue = "Expected: true\nActual: %v"
+ shouldHaveBeenFalse = "Expected: false\nActual: %v"
+
+ shouldHaveBeenZeroValue = "'%+v' should have been the zero value" //"Expected: (zero value)\nActual: %v"
+ shouldNotHaveBeenZeroValue = "'%+v' should NOT have been the zero value"
+
+ shouldHaveBeenGreater = "Expected '%v' to be greater than '%v' (but it wasn't)!"
+ shouldHaveBeenGreaterOrEqual = "Expected '%v' to be greater than or equal to '%v' (but it wasn't)!"
+
+ shouldHaveBeenLess = "Expected '%v' to be less than '%v' (but it wasn't)!"
+ shouldHaveBeenLessOrEqual = "Expected '%v' to be less than or equal to '%v' (but it wasn't)!"
+
+ shouldHaveBeenBetween = "Expected '%v' to be between '%v' and '%v' (but it wasn't)!"
+ shouldNotHaveBeenBetween = "Expected '%v' NOT to be between '%v' and '%v' (but it was)!"
+ shouldHaveDifferentUpperAndLower = "The lower and upper bounds must be different values (they were both '%v')."
+
+ shouldHaveBeenBetweenOrEqual = "Expected '%v' to be between '%v' and '%v' or equal to one of them (but it wasn't)!"
+ shouldNotHaveBeenBetweenOrEqual = "Expected '%v' NOT to be between '%v' and '%v' or equal to one of them (but it was)!"
+
+ shouldHaveContained = "Expected the container (%v) to contain: '%v' (but it didn't)!"
+ shouldNotHaveContained = "Expected the container (%v) NOT to contain: '%v' (but it did)!"
+ shouldHaveBeenAValidCollection = "You must provide a valid container (was %v)!"
+
+ shouldHaveContainedKey = "Expected the %v to contain the key: %v (but it didn't)!"
+ shouldNotHaveContainedKey = "Expected the %v NOT to contain the key: %v (but it did)!"
+ shouldHaveBeenAValidMap = "You must provide a valid map type (was %v)!"
+
+ shouldHaveBeenIn = "Expected '%v' to be in the container (%v), but it wasn't!"
+ shouldNotHaveBeenIn = "Expected '%v' NOT to be in the container (%v), but it was!"
+
+ shouldHaveBeenEmpty = "Expected %+v to be empty (but it wasn't)!"
+ shouldNotHaveBeenEmpty = "Expected %+v to NOT be empty (but it was)!"
+
+ shouldHaveBeenAValidInteger = "You must provide a valid integer (was %v)!"
+ shouldHaveBeenAValidLength = "You must provide a valid positive integer (was %v)!"
+	shouldHaveHadLength = "Expected collection to have length equal to [%v], but its length was [%v] instead! contents: %+v"
+
+ shouldHaveStartedWith = "Expected '%v'\nto start with '%v'\n(but it didn't)!"
+ shouldNotHaveStartedWith = "Expected '%v'\nNOT to start with '%v'\n(but it did)!"
+
+ shouldHaveEndedWith = "Expected '%v'\nto end with '%v'\n(but it didn't)!"
+ shouldNotHaveEndedWith = "Expected '%v'\nNOT to end with '%v'\n(but it did)!"
+
+ shouldAllBeStrings = "All arguments to this assertion must be strings (you provided: %v)."
+ shouldBothBeStrings = "Both arguments to this assertion must be strings (you provided %v and %v)."
+
+ shouldHaveContainedSubstring = "Expected '%s' to contain substring '%s' (but it didn't)!"
+ shouldNotHaveContainedSubstring = "Expected '%s' NOT to contain substring '%s' (but it did)!"
+
+ shouldBeString = "The argument to this assertion must be a string (you provided %v)."
+ shouldHaveBeenBlank = "Expected '%s' to be blank (but it wasn't)!"
+ shouldNotHaveBeenBlank = "Expected value to NOT be blank (but it was)!"
+
+ shouldUseVoidNiladicFunction = "You must provide a void, niladic function as the first argument!"
+ shouldHavePanicked = "Expected func() to panic (but it didn't)!"
+ shouldNotHavePanicked = "Expected func() NOT to panic (error: '%+v')!"
+
+ shouldHavePanickedWith = "Expected func() to panic with '%v' (but it panicked with '%v')!"
+ shouldNotHavePanickedWith = "Expected func() NOT to panic with '%v' (but it did)!"
+
+ shouldHaveBeenA = "Expected '%v' to be: '%v' (but was: '%v')!"
+ shouldNotHaveBeenA = "Expected '%v' to NOT be: '%v' (but it was)!"
+
+ shouldHaveImplemented = "Expected: '%v interface support'\nActual: '%v' does not implement the interface!"
+ shouldNotHaveImplemented = "Expected '%v'\nto NOT implement '%v'\n(but it did)!"
+ shouldCompareWithInterfacePointer = "The expected value must be a pointer to an interface type (eg. *fmt.Stringer)"
+ shouldNotBeNilActual = "The actual value was 'nil' and should be a value or a pointer to a value!"
+
+ shouldBeError = "Expected an error value (but was '%v' instead)!"
+ shouldBeErrorInvalidComparisonValue = "The final argument to this assertion must be a string or an error value (you provided: '%v')."
+
+ shouldUseTimes = "You must provide time instances as arguments to this assertion."
+ shouldUseTimeSlice = "You must provide a slice of time instances as the first argument to this assertion."
+ shouldUseDurationAndTime = "You must provide a duration and a time as arguments to this assertion."
+
+ shouldHaveHappenedBefore = "Expected '%v' to happen before '%v' (it happened '%v' after)!"
+ shouldHaveHappenedAfter = "Expected '%v' to happen after '%v' (it happened '%v' before)!"
+ shouldHaveHappenedBetween = "Expected '%v' to happen between '%v' and '%v' (it happened '%v' outside threshold)!"
+ shouldNotHaveHappenedOnOrBetween = "Expected '%v' to NOT happen on or between '%v' and '%v' (but it did)!"
+
+ // format params: incorrect-index, previous-index, previous-time, incorrect-index, incorrect-time
+ shouldHaveBeenChronological = "The 'Time' at index [%d] should have happened after the previous one (but it didn't!):\n [%d]: %s\n [%d]: %s (see, it happened before!)"
+ shouldNotHaveBeenchronological = "The provided times should NOT be chronological, but they were."
+)
diff --git a/backend/vendor/github.com/smartystreets/assertions/panic.go b/backend/vendor/github.com/smartystreets/assertions/panic.go
new file mode 100644
index 00000000..7e75db17
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/panic.go
@@ -0,0 +1,115 @@
+package assertions
+
+import "fmt"
+
+// ShouldPanic receives a void, niladic function and expects to recover a panic.
+func ShouldPanic(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered == nil {
+ message = shouldHavePanicked
+ } else {
+ message = success
+ }
+ }()
+ action()
+
+ return
+}
+
+// ShouldNotPanic receives a void, niladic function and expects to execute the function without any panic.
+func ShouldNotPanic(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered != nil {
+ message = fmt.Sprintf(shouldNotHavePanicked, recovered)
+ } else {
+ message = success
+ }
+ }()
+ action()
+
+ return
+}
+
+// ShouldPanicWith receives a void, niladic function and expects to recover a panic with the second argument as the content.
+func ShouldPanicWith(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered == nil {
+ message = shouldHavePanicked
+ } else {
+ if equal := ShouldEqual(recovered, expected[0]); equal != success {
+ message = serializer.serialize(expected[0], recovered, fmt.Sprintf(shouldHavePanickedWith, expected[0], recovered))
+ } else {
+ message = success
+ }
+ }
+ }()
+ action()
+
+ return
+}
+
+// ShouldNotPanicWith receives a void, niladic function and expects to recover a panic whose content differs from the second argument.
+func ShouldNotPanicWith(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered == nil {
+ message = success
+ } else {
+ if equal := ShouldEqual(recovered, expected[0]); equal == success {
+ message = fmt.Sprintf(shouldNotHavePanickedWith, expected[0])
+ } else {
+ message = success
+ }
+ }
+ }()
+ action()
+
+ return
+}
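A short usage sketch of the panic assertions defined above, assuming the package is imported via its public path; an empty return string means the assertion passed:

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	boom := func() { panic("kaboom") }

	// ShouldPanicWith recovers the panic value and compares it to the expectation.
	fmt.Println(assertions.ShouldPanicWith(boom, "kaboom") == "") // true

	// ShouldNotPanic passes when the function returns normally.
	fmt.Println(assertions.ShouldNotPanic(func() {}) == "") // true
}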
diff --git a/backend/vendor/github.com/smartystreets/assertions/quantity.go b/backend/vendor/github.com/smartystreets/assertions/quantity.go
new file mode 100644
index 00000000..f28b0a06
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/quantity.go
@@ -0,0 +1,141 @@
+package assertions
+
+import (
+ "fmt"
+
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// ShouldBeGreaterThan receives exactly two parameters and ensures that the first is greater than the second.
+func ShouldBeGreaterThan(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ if matchError := oglematchers.GreaterThan(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenGreater, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeGreaterThanOrEqualTo receives exactly two parameters and ensures that the first is greater than or equal to the second.
+func ShouldBeGreaterThanOrEqualTo(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if matchError := oglematchers.GreaterOrEqual(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenGreaterOrEqual, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeLessThan receives exactly two parameters and ensures that the first is less than the second.
+func ShouldBeLessThan(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if matchError := oglematchers.LessThan(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenLess, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeLessThanOrEqualTo receives exactly two parameters and ensures that the first is less than or equal to the second.
+func ShouldBeLessThanOrEqualTo(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if matchError := oglematchers.LessOrEqual(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenLessOrEqual, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeBetween receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is between both bounds (but not equal to either of them).
+func ShouldBeBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if !isBetween(actual, lower, upper) {
+ return fmt.Sprintf(shouldHaveBeenBetween, actual, lower, upper)
+ }
+ return success
+}
+
+// ShouldNotBeBetween receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is NOT between both bounds.
+func ShouldNotBeBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if isBetween(actual, lower, upper) {
+ return fmt.Sprintf(shouldNotHaveBeenBetween, actual, lower, upper)
+ }
+ return success
+}
+func deriveBounds(values []interface{}) (lower interface{}, upper interface{}, fail string) {
+ lower = values[0]
+ upper = values[1]
+
+ if ShouldNotEqual(lower, upper) != success {
+ return nil, nil, fmt.Sprintf(shouldHaveDifferentUpperAndLower, lower)
+ } else if ShouldBeLessThan(lower, upper) != success {
+ lower, upper = upper, lower
+ }
+ return lower, upper, success
+}
+func isBetween(value, lower, upper interface{}) bool {
+ if ShouldBeGreaterThan(value, lower) != success {
+ return false
+ } else if ShouldBeLessThan(value, upper) != success {
+ return false
+ }
+ return true
+}
+
+// ShouldBeBetweenOrEqual receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is between both bounds or equal to one of them.
+func ShouldBeBetweenOrEqual(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if !isBetweenOrEqual(actual, lower, upper) {
+ return fmt.Sprintf(shouldHaveBeenBetweenOrEqual, actual, lower, upper)
+ }
+ return success
+}
+
+// ShouldNotBeBetweenOrEqual receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is not between the bounds nor equal to either of them.
+func ShouldNotBeBetweenOrEqual(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if isBetweenOrEqual(actual, lower, upper) {
+ return fmt.Sprintf(shouldNotHaveBeenBetweenOrEqual, actual, lower, upper)
+ }
+ return success
+}
+
+func isBetweenOrEqual(value, lower, upper interface{}) bool {
+ if ShouldBeGreaterThanOrEqualTo(value, lower) != success {
+ return false
+ } else if ShouldBeLessThanOrEqualTo(value, upper) != success {
+ return false
+ }
+ return true
+}
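deriveBounds above swaps the bounds when they arrive out of order, and isBetween treats them as exclusive. A usage sketch (an empty return string is a passing assertion):

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	fmt.Println(assertions.ShouldBeBetween(5, 1, 10) == "")        // true
	fmt.Println(assertions.ShouldBeBetween(5, 10, 1) == "")        // true: bounds are swapped
	fmt.Println(assertions.ShouldBeBetween(5, 5, 10) == "")        // false: bounds are exclusive
	fmt.Println(assertions.ShouldBeBetweenOrEqual(5, 5, 10) == "") // true
}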
diff --git a/backend/vendor/github.com/smartystreets/assertions/serializer.go b/backend/vendor/github.com/smartystreets/assertions/serializer.go
new file mode 100644
index 00000000..f1e3570e
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/serializer.go
@@ -0,0 +1,70 @@
+package assertions
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/smartystreets/assertions/internal/go-render/render"
+)
+
+type Serializer interface {
+ serialize(expected, actual interface{}, message string) string
+ serializeDetailed(expected, actual interface{}, message string) string
+}
+
+type failureSerializer struct{}
+
+func (self *failureSerializer) serializeDetailed(expected, actual interface{}, message string) string {
+ if index := strings.Index(message, " Diff:"); index > 0 {
+ message = message[:index]
+ }
+ view := FailureView{
+ Message: message,
+ Expected: render.Render(expected),
+ Actual: render.Render(actual),
+ }
+ serialized, _ := json.Marshal(view)
+ return string(serialized)
+}
+
+func (self *failureSerializer) serialize(expected, actual interface{}, message string) string {
+ if index := strings.Index(message, " Diff:"); index > 0 {
+ message = message[:index]
+ }
+ view := FailureView{
+ Message: message,
+ Expected: fmt.Sprintf("%+v", expected),
+ Actual: fmt.Sprintf("%+v", actual),
+ }
+ serialized, _ := json.Marshal(view)
+ return string(serialized)
+}
+
+func newSerializer() *failureSerializer {
+ return &failureSerializer{}
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// This struct is also declared in github.com/smartystreets/goconvey/convey/reporting.
+// The json struct tags should be equal in both declarations.
+type FailureView struct {
+ Message string `json:"Message"`
+ Expected string `json:"Expected"`
+ Actual string `json:"Actual"`
+}
+
+///////////////////////////////////////////////////////
+
+// noopSerializer just gives back the original message. This is useful when we are using
+// the assertions from a context other than the GoConvey Web UI, that requires the JSON
+// structure provided by the failureSerializer.
+type noopSerializer struct{}
+
+func (self *noopSerializer) serialize(expected, actual interface{}, message string) string {
+ return message
+}
+func (self *noopSerializer) serializeDetailed(expected, actual interface{}, message string) string {
+ return message
+}
diff --git a/backend/vendor/github.com/smartystreets/assertions/strings.go b/backend/vendor/github.com/smartystreets/assertions/strings.go
new file mode 100644
index 00000000..dbc3f047
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/strings.go
@@ -0,0 +1,227 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// ShouldStartWith receives exactly 2 string parameters and ensures that the first starts with the second.
+func ShouldStartWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ prefix, prefixIsString := expected[0].(string)
+
+ if !valueIsString || !prefixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldStartWith(value, prefix)
+}
+func shouldStartWith(value, prefix string) string {
+ if !strings.HasPrefix(value, prefix) {
+ shortval := value
+ if len(shortval) > len(prefix) {
+ shortval = shortval[:len(prefix)] + "..."
+ }
+ return serializer.serialize(prefix, shortval, fmt.Sprintf(shouldHaveStartedWith, value, prefix))
+ }
+ return success
+}
+
+// ShouldNotStartWith receives exactly 2 string parameters and ensures that the first does not start with the second.
+func ShouldNotStartWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ prefix, prefixIsString := expected[0].(string)
+
+ if !valueIsString || !prefixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldNotStartWith(value, prefix)
+}
+func shouldNotStartWith(value, prefix string) string {
+ if strings.HasPrefix(value, prefix) {
+ if value == "" {
+			value = "<empty>"
+ }
+ if prefix == "" {
+			prefix = "<empty>"
+ }
+ return fmt.Sprintf(shouldNotHaveStartedWith, value, prefix)
+ }
+ return success
+}
+
+// ShouldEndWith receives exactly 2 string parameters and ensures that the first ends with the second.
+func ShouldEndWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ suffix, suffixIsString := expected[0].(string)
+
+ if !valueIsString || !suffixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldEndWith(value, suffix)
+}
+func shouldEndWith(value, suffix string) string {
+ if !strings.HasSuffix(value, suffix) {
+ shortval := value
+ if len(shortval) > len(suffix) {
+ shortval = "..." + shortval[len(shortval)-len(suffix):]
+ }
+ return serializer.serialize(suffix, shortval, fmt.Sprintf(shouldHaveEndedWith, value, suffix))
+ }
+ return success
+}
+
+// ShouldNotEndWith receives exactly 2 string parameters and ensures that the first does not end with the second.
+func ShouldNotEndWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ suffix, suffixIsString := expected[0].(string)
+
+ if !valueIsString || !suffixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldNotEndWith(value, suffix)
+}
+func shouldNotEndWith(value, suffix string) string {
+ if strings.HasSuffix(value, suffix) {
+ if value == "" {
+			value = "<empty>"
+ }
+ if suffix == "" {
+			suffix = "<empty>"
+ }
+ return fmt.Sprintf(shouldNotHaveEndedWith, value, suffix)
+ }
+ return success
+}
+
+// ShouldContainSubstring receives exactly 2 string parameters and ensures that the first contains the second as a substring.
+func ShouldContainSubstring(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ long, longOk := actual.(string)
+ short, shortOk := expected[0].(string)
+
+ if !longOk || !shortOk {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ if !strings.Contains(long, short) {
+ return serializer.serialize(expected[0], actual, fmt.Sprintf(shouldHaveContainedSubstring, long, short))
+ }
+ return success
+}
+
+// ShouldNotContainSubstring receives exactly 2 string parameters and ensures that the first does NOT contain the second as a substring.
+func ShouldNotContainSubstring(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ long, longOk := actual.(string)
+ short, shortOk := expected[0].(string)
+
+ if !longOk || !shortOk {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ if strings.Contains(long, short) {
+ return fmt.Sprintf(shouldNotHaveContainedSubstring, long, short)
+ }
+ return success
+}
+
+// ShouldBeBlank receives exactly 1 string parameter and ensures that it is equal to "".
+func ShouldBeBlank(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ value, ok := actual.(string)
+ if !ok {
+ return fmt.Sprintf(shouldBeString, reflect.TypeOf(actual))
+ }
+ if value != "" {
+ return serializer.serialize("", value, fmt.Sprintf(shouldHaveBeenBlank, value))
+ }
+ return success
+}
+
+// ShouldNotBeBlank receives exactly 1 string parameter and ensures that it is NOT equal to "".
+func ShouldNotBeBlank(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ value, ok := actual.(string)
+ if !ok {
+ return fmt.Sprintf(shouldBeString, reflect.TypeOf(actual))
+ }
+ if value == "" {
+ return shouldNotHaveBeenBlank
+ }
+ return success
+}
+
+// ShouldEqualWithout receives exactly 3 string parameters and ensures that the first is equal to the second
+// after removing all instances of the third from the first using strings.Replace(first, third, "", -1).
+func ShouldEqualWithout(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualString, ok1 := actual.(string)
+ expectedString, ok2 := expected[0].(string)
+ replace, ok3 := expected[1].(string)
+
+ if !ok1 || !ok2 || !ok3 {
+ return fmt.Sprintf(shouldAllBeStrings, []reflect.Type{
+ reflect.TypeOf(actual),
+ reflect.TypeOf(expected[0]),
+ reflect.TypeOf(expected[1]),
+ })
+ }
+
+ replaced := strings.Replace(actualString, replace, "", -1)
+ if replaced == expectedString {
+ return ""
+ }
+
+ return fmt.Sprintf("Expected '%s' to equal '%s' but without any '%s' (but it didn't).", actualString, expectedString, replace)
+}
+
+// ShouldEqualTrimSpace receives exactly 2 string parameters and ensures that the first is equal to the second
+// after removing all leading and trailing whitespace using strings.TrimSpace(first).
+func ShouldEqualTrimSpace(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ actualString, valueIsString := actual.(string)
+ _, value2IsString := expected[0].(string)
+
+ if !valueIsString || !value2IsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ actualString = strings.TrimSpace(actualString)
+ return ShouldEqual(actualString, expected[0])
+}
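A usage sketch for the string assertions above (an empty return string is a passing assertion); ShouldEqualWithout strips every occurrence of its third argument from the actual value before comparing:

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	fmt.Println(assertions.ShouldStartWith("golang", "go") == "")           // true
	fmt.Println(assertions.ShouldContainSubstring("taco cat", "aco") == "") // true
	fmt.Println(assertions.ShouldEqualWithout("a-b-c", "abc", "-") == "")   // true
}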
diff --git a/backend/vendor/github.com/smartystreets/assertions/time.go b/backend/vendor/github.com/smartystreets/assertions/time.go
new file mode 100644
index 00000000..918ee284
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/time.go
@@ -0,0 +1,218 @@
+package assertions
+
+import (
+ "fmt"
+ "time"
+)
+
+// ShouldHappenBefore receives exactly 2 time.Time arguments and asserts that the first happens before the second.
+func ShouldHappenBefore(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+
+ if !actualTime.Before(expectedTime) {
+ return fmt.Sprintf(shouldHaveHappenedBefore, actualTime, expectedTime, actualTime.Sub(expectedTime))
+ }
+
+ return success
+}
+
+// ShouldHappenOnOrBefore receives exactly 2 time.Time arguments and asserts that the first happens on or before the second.
+func ShouldHappenOnOrBefore(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+
+ if actualTime.Equal(expectedTime) {
+ return success
+ }
+ return ShouldHappenBefore(actualTime, expectedTime)
+}
+
+// ShouldHappenAfter receives exactly 2 time.Time arguments and asserts that the first happens after the second.
+func ShouldHappenAfter(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+ if !actualTime.After(expectedTime) {
+ return fmt.Sprintf(shouldHaveHappenedAfter, actualTime, expectedTime, expectedTime.Sub(actualTime))
+ }
+ return success
+}
+
+// ShouldHappenOnOrAfter receives exactly 2 time.Time arguments and asserts that the first happens on or after the second.
+func ShouldHappenOnOrAfter(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+ if actualTime.Equal(expectedTime) {
+ return success
+ }
+ return ShouldHappenAfter(actualTime, expectedTime)
+}
+
+// ShouldHappenBetween receives exactly 3 time.Time arguments and asserts that the first happens between (not on) the second and third.
+func ShouldHappenBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ min, secondOk := expected[0].(time.Time)
+ max, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseTimes
+ }
+
+ if !actualTime.After(min) {
+ return fmt.Sprintf(shouldHaveHappenedBetween, actualTime, min, max, min.Sub(actualTime))
+ }
+ if !actualTime.Before(max) {
+ return fmt.Sprintf(shouldHaveHappenedBetween, actualTime, min, max, actualTime.Sub(max))
+ }
+ return success
+}
+
+// ShouldHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that the first happens between or on the second and third.
+func ShouldHappenOnOrBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ min, secondOk := expected[0].(time.Time)
+ max, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseTimes
+ }
+ if actualTime.Equal(min) || actualTime.Equal(max) {
+ return success
+ }
+ return ShouldHappenBetween(actualTime, min, max)
+}
+
+// ShouldNotHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that the first
+// does NOT happen between or on the second or third.
+func ShouldNotHappenOnOrBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ min, secondOk := expected[0].(time.Time)
+ max, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseTimes
+ }
+ if actualTime.Equal(min) || actualTime.Equal(max) {
+ return fmt.Sprintf(shouldNotHaveHappenedOnOrBetween, actualTime, min, max)
+ }
+ if actualTime.After(min) && actualTime.Before(max) {
+ return fmt.Sprintf(shouldNotHaveHappenedOnOrBetween, actualTime, min, max)
+ }
+ return success
+}
+
+// ShouldHappenWithin receives a time.Time, a time.Duration, and a time.Time (3 arguments)
+// and asserts that the first time.Time happens within or on the duration specified relative to
+// the other time.Time.
+func ShouldHappenWithin(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ tolerance, secondOk := expected[0].(time.Duration)
+ threshold, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseDurationAndTime
+ }
+
+ min := threshold.Add(-tolerance)
+ max := threshold.Add(tolerance)
+ return ShouldHappenOnOrBetween(actualTime, min, max)
+}
+
+// ShouldNotHappenWithin receives a time.Time, a time.Duration, and a time.Time (3 arguments)
+// and asserts that the first time.Time does NOT happen within or on the duration specified relative to
+// the other time.Time.
+func ShouldNotHappenWithin(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ tolerance, secondOk := expected[0].(time.Duration)
+ threshold, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseDurationAndTime
+ }
+
+ min := threshold.Add(-tolerance)
+ max := threshold.Add(tolerance)
+ return ShouldNotHappenOnOrBetween(actualTime, min, max)
+}
+
+// ShouldBeChronological receives a []time.Time slice and asserts that they are
+// in chronological order starting with the first time.Time as the earliest.
+func ShouldBeChronological(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ times, ok := actual.([]time.Time)
+ if !ok {
+ return shouldUseTimeSlice
+ }
+
+ var previous time.Time
+ for i, current := range times {
+ if i > 0 && current.Before(previous) {
+ return fmt.Sprintf(shouldHaveBeenChronological,
+ i, i-1, previous.String(), i, current.String())
+ }
+ previous = current
+ }
+ return ""
+}
+
+// ShouldNotBeChronological receives a []time.Time slice and asserts that they are
+// NOT in chronological order.
+func ShouldNotBeChronological(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ if _, ok := actual.([]time.Time); !ok {
+ return shouldUseTimeSlice
+ }
+ result := ShouldBeChronological(actual, expected...)
+ if result != "" {
+ return ""
+ }
+ return shouldNotHaveBeenchronological
+}
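ShouldHappenWithin above widens the threshold time by ±tolerance and then defers to ShouldHappenOnOrBetween. A usage sketch (an empty return string is a passing assertion):

package main

import (
	"fmt"
	"time"

	"github.com/smartystreets/assertions"
)

func main() {
	base := time.Date(2019, 7, 1, 12, 0, 0, 0, time.UTC)
	later := base.Add(30 * time.Second)

	fmt.Println(assertions.ShouldHappenWithin(later, time.Minute, base) == "")    // true
	fmt.Println(assertions.ShouldHappenWithin(later, 10*time.Second, base) == "") // false
	fmt.Println(assertions.ShouldBeChronological([]time.Time{base, later}) == "") // true
}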
diff --git a/backend/vendor/github.com/smartystreets/assertions/type.go b/backend/vendor/github.com/smartystreets/assertions/type.go
new file mode 100644
index 00000000..d2d1dc86
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/assertions/type.go
@@ -0,0 +1,134 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// ShouldHaveSameTypeAs receives exactly two parameters and compares their underlying types for equality.
+func ShouldHaveSameTypeAs(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ first := reflect.TypeOf(actual)
+ second := reflect.TypeOf(expected[0])
+
+ if first != second {
+ return serializer.serialize(second, first, fmt.Sprintf(shouldHaveBeenA, actual, second, first))
+ }
+
+ return success
+}
+
+// ShouldNotHaveSameTypeAs receives exactly two parameters and compares their underlying types for inequality.
+func ShouldNotHaveSameTypeAs(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ first := reflect.TypeOf(actual)
+ second := reflect.TypeOf(expected[0])
+
+ if (actual == nil && expected[0] == nil) || first == second {
+ return fmt.Sprintf(shouldNotHaveBeenA, actual, second)
+ }
+ return success
+}
+
+// ShouldImplement receives exactly two parameters and ensures
+// that the first implements the interface type of the second.
+func ShouldImplement(actual interface{}, expectedList ...interface{}) string {
+ if fail := need(1, expectedList); fail != success {
+ return fail
+ }
+
+ expected := expectedList[0]
+ if fail := ShouldBeNil(expected); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ if fail := ShouldNotBeNil(actual); fail != success {
+ return shouldNotBeNilActual
+ }
+
+ var actualType reflect.Type
+ if reflect.TypeOf(actual).Kind() != reflect.Ptr {
+ actualType = reflect.PtrTo(reflect.TypeOf(actual))
+ } else {
+ actualType = reflect.TypeOf(actual)
+ }
+
+ expectedType := reflect.TypeOf(expected)
+ if fail := ShouldNotBeNil(expectedType); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ expectedInterface := expectedType.Elem()
+
+ if !actualType.Implements(expectedInterface) {
+ return fmt.Sprintf(shouldHaveImplemented, expectedInterface, actualType)
+ }
+ return success
+}
+
+// ShouldNotImplement receives exactly two parameters and ensures
+// that the first does NOT implement the interface type of the second.
+func ShouldNotImplement(actual interface{}, expectedList ...interface{}) string {
+ if fail := need(1, expectedList); fail != success {
+ return fail
+ }
+
+ expected := expectedList[0]
+ if fail := ShouldBeNil(expected); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ if fail := ShouldNotBeNil(actual); fail != success {
+ return shouldNotBeNilActual
+ }
+
+ var actualType reflect.Type
+ if reflect.TypeOf(actual).Kind() != reflect.Ptr {
+ actualType = reflect.PtrTo(reflect.TypeOf(actual))
+ } else {
+ actualType = reflect.TypeOf(actual)
+ }
+
+ expectedType := reflect.TypeOf(expected)
+ if fail := ShouldNotBeNil(expectedType); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ expectedInterface := expectedType.Elem()
+
+ if actualType.Implements(expectedInterface) {
+ return fmt.Sprintf(shouldNotHaveImplemented, actualType, expectedInterface)
+ }
+ return success
+}
+
+// ShouldBeError asserts that the first argument implements the error interface.
+// It also compares the first argument against the second argument if provided
+// (which must be an error message string or another error value).
+func ShouldBeError(actual interface{}, expected ...interface{}) string {
+ if fail := atMost(1, expected); fail != success {
+ return fail
+ }
+
+ if !isError(actual) {
+ return fmt.Sprintf(shouldBeError, reflect.TypeOf(actual))
+ }
+
+ if len(expected) == 0 {
+ return success
+ }
+
+ if expected := expected[0]; !isString(expected) && !isError(expected) {
+ return fmt.Sprintf(shouldBeErrorInvalidComparisonValue, reflect.TypeOf(expected))
+ }
+ return ShouldEqual(fmt.Sprint(actual), fmt.Sprint(expected[0]))
+}
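+
+// NOTE: an illustrative sketch (not from the upstream source) of typical
+// ShouldBeError results, assuming the standard library errors package:
+//
+//	err := errors.New("boom")
+//	ShouldBeError(err)                     // "" (passes: err is an error)
+//	ShouldBeError(err, "boom")             // "" (message matches)
+//	ShouldBeError(err, errors.New("boom")) // "" (fmt.Sprint of both compared)
+//	ShouldBeError("boom")                  // fails: a plain string is not an error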
+
+func isString(value interface{}) bool { _, ok := value.(string); return ok }
+func isError(value interface{}) bool { _, ok := value.(error); return ok }
diff --git a/backend/vendor/github.com/smartystreets/goconvey/LICENSE.md b/backend/vendor/github.com/smartystreets/goconvey/LICENSE.md
new file mode 100644
index 00000000..3f87a40e
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/LICENSE.md
@@ -0,0 +1,23 @@
+Copyright (c) 2016 SmartyStreets, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+NOTE: Various optional and subordinate components carry their own licensing
+requirements and restrictions. Use of those components is subject to the terms
+and conditions outlined the respective license of each component.
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/assertions.go b/backend/vendor/github.com/smartystreets/goconvey/convey/assertions.go
new file mode 100644
index 00000000..97e3bec8
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/assertions.go
@@ -0,0 +1,71 @@
+package convey
+
+import "github.com/smartystreets/assertions"
+
+var (
+ ShouldEqual = assertions.ShouldEqual
+ ShouldNotEqual = assertions.ShouldNotEqual
+ ShouldAlmostEqual = assertions.ShouldAlmostEqual
+ ShouldNotAlmostEqual = assertions.ShouldNotAlmostEqual
+ ShouldResemble = assertions.ShouldResemble
+ ShouldNotResemble = assertions.ShouldNotResemble
+ ShouldPointTo = assertions.ShouldPointTo
+ ShouldNotPointTo = assertions.ShouldNotPointTo
+ ShouldBeNil = assertions.ShouldBeNil
+ ShouldNotBeNil = assertions.ShouldNotBeNil
+ ShouldBeTrue = assertions.ShouldBeTrue
+ ShouldBeFalse = assertions.ShouldBeFalse
+ ShouldBeZeroValue = assertions.ShouldBeZeroValue
+ ShouldNotBeZeroValue = assertions.ShouldNotBeZeroValue
+
+ ShouldBeGreaterThan = assertions.ShouldBeGreaterThan
+ ShouldBeGreaterThanOrEqualTo = assertions.ShouldBeGreaterThanOrEqualTo
+ ShouldBeLessThan = assertions.ShouldBeLessThan
+ ShouldBeLessThanOrEqualTo = assertions.ShouldBeLessThanOrEqualTo
+ ShouldBeBetween = assertions.ShouldBeBetween
+ ShouldNotBeBetween = assertions.ShouldNotBeBetween
+ ShouldBeBetweenOrEqual = assertions.ShouldBeBetweenOrEqual
+ ShouldNotBeBetweenOrEqual = assertions.ShouldNotBeBetweenOrEqual
+
+ ShouldContain = assertions.ShouldContain
+ ShouldNotContain = assertions.ShouldNotContain
+ ShouldContainKey = assertions.ShouldContainKey
+ ShouldNotContainKey = assertions.ShouldNotContainKey
+ ShouldBeIn = assertions.ShouldBeIn
+ ShouldNotBeIn = assertions.ShouldNotBeIn
+ ShouldBeEmpty = assertions.ShouldBeEmpty
+ ShouldNotBeEmpty = assertions.ShouldNotBeEmpty
+ ShouldHaveLength = assertions.ShouldHaveLength
+
+ ShouldStartWith = assertions.ShouldStartWith
+ ShouldNotStartWith = assertions.ShouldNotStartWith
+ ShouldEndWith = assertions.ShouldEndWith
+ ShouldNotEndWith = assertions.ShouldNotEndWith
+ ShouldBeBlank = assertions.ShouldBeBlank
+ ShouldNotBeBlank = assertions.ShouldNotBeBlank
+ ShouldContainSubstring = assertions.ShouldContainSubstring
+ ShouldNotContainSubstring = assertions.ShouldNotContainSubstring
+
+ ShouldPanic = assertions.ShouldPanic
+ ShouldNotPanic = assertions.ShouldNotPanic
+ ShouldPanicWith = assertions.ShouldPanicWith
+ ShouldNotPanicWith = assertions.ShouldNotPanicWith
+
+ ShouldHaveSameTypeAs = assertions.ShouldHaveSameTypeAs
+ ShouldNotHaveSameTypeAs = assertions.ShouldNotHaveSameTypeAs
+ ShouldImplement = assertions.ShouldImplement
+ ShouldNotImplement = assertions.ShouldNotImplement
+
+ ShouldHappenBefore = assertions.ShouldHappenBefore
+ ShouldHappenOnOrBefore = assertions.ShouldHappenOnOrBefore
+ ShouldHappenAfter = assertions.ShouldHappenAfter
+ ShouldHappenOnOrAfter = assertions.ShouldHappenOnOrAfter
+ ShouldHappenBetween = assertions.ShouldHappenBetween
+ ShouldHappenOnOrBetween = assertions.ShouldHappenOnOrBetween
+ ShouldNotHappenOnOrBetween = assertions.ShouldNotHappenOnOrBetween
+ ShouldHappenWithin = assertions.ShouldHappenWithin
+ ShouldNotHappenWithin = assertions.ShouldNotHappenWithin
+ ShouldBeChronological = assertions.ShouldBeChronological
+
+ ShouldBeError = assertions.ShouldBeError
+)
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/context.go b/backend/vendor/github.com/smartystreets/goconvey/convey/context.go
new file mode 100644
index 00000000..2c75c2d7
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/context.go
@@ -0,0 +1,272 @@
+package convey
+
+import (
+ "fmt"
+
+ "github.com/jtolds/gls"
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+type conveyErr struct {
+ fmt string
+ params []interface{}
+}
+
+func (e *conveyErr) Error() string {
+ return fmt.Sprintf(e.fmt, e.params...)
+}
+
+func conveyPanic(fmt string, params ...interface{}) {
+ panic(&conveyErr{fmt, params})
+}
+
+const (
+ missingGoTest = `Top-level calls to Convey(...) need a reference to the *testing.T.
+ Hint: Convey("description here", t, func() { /* notice that the second argument was the *testing.T (t)! */ }) `
+ extraGoTest = `Only the top-level call to Convey(...) needs a reference to the *testing.T.`
+ noStackContext = "Convey operation made without context on goroutine stack.\n" +
+ "Hint: Perhaps you meant to use `Convey(..., func(c C){...})` ?"
+ differentConveySituations = "Different set of Convey statements on subsequent pass!\nDid not expect %#v."
+ multipleIdenticalConvey = "Multiple convey suites with identical names: %#v"
+)
+
+const (
+ failureHalt = "___FAILURE_HALT___"
+
+ nodeKey = "node"
+)
+
+///////////////////////////////// Stack Context /////////////////////////////////
+
+func getCurrentContext() *context {
+ ctx, ok := ctxMgr.GetValue(nodeKey)
+ if ok {
+ return ctx.(*context)
+ }
+ return nil
+}
+
+func mustGetCurrentContext() *context {
+ ctx := getCurrentContext()
+ if ctx == nil {
+ conveyPanic(noStackContext)
+ }
+ return ctx
+}
+
+//////////////////////////////////// Context ////////////////////////////////////
+
+// context magically handles all coordination of Convey's and So assertions.
+//
+// It is tracked on the stack as goroutine-local-storage with the gls package,
+// or explicitly if the user decides to call convey like:
+//
+// Convey(..., func(c C) {
+// c.So(...)
+// })
+//
+// This implements the `C` interface.
+type context struct {
+ reporter reporting.Reporter
+
+ children map[string]*context
+
+ resets []func()
+
+ executedOnce bool
+ expectChildRun *bool
+ complete bool
+
+ focus bool
+ failureMode FailureMode
+}
+
+// rootConvey is the main entry point to a test suite. This is called when
+// there's no context in the stack already, and items must contain a `t` object,
+// or this panics.
+func rootConvey(items ...interface{}) {
+ entry := discover(items)
+
+ if entry.Test == nil {
+ conveyPanic(missingGoTest)
+ }
+
+ expectChildRun := true
+ ctx := &context{
+ reporter: buildReporter(),
+
+ children: make(map[string]*context),
+
+ expectChildRun: &expectChildRun,
+
+ focus: entry.Focus,
+ failureMode: defaultFailureMode.combine(entry.FailMode),
+ }
+ ctxMgr.SetValues(gls.Values{nodeKey: ctx}, func() {
+ ctx.reporter.BeginStory(reporting.NewStoryReport(entry.Test))
+ defer ctx.reporter.EndStory()
+
+ for ctx.shouldVisit() {
+ ctx.conveyInner(entry.Situation, entry.Func)
+ expectChildRun = true
+ }
+ })
+}
+
+//////////////////////////////////// Methods ////////////////////////////////////
+
+func (ctx *context) SkipConvey(items ...interface{}) {
+ ctx.Convey(items, skipConvey)
+}
+
+func (ctx *context) FocusConvey(items ...interface{}) {
+ ctx.Convey(items, focusConvey)
+}
+
+func (ctx *context) Convey(items ...interface{}) {
+ entry := discover(items)
+
+ // we're a branch, or leaf (on the wind)
+ if entry.Test != nil {
+ conveyPanic(extraGoTest)
+ }
+ if ctx.focus && !entry.Focus {
+ return
+ }
+
+ var inner_ctx *context
+ if ctx.executedOnce {
+ var ok bool
+ inner_ctx, ok = ctx.children[entry.Situation]
+ if !ok {
+ conveyPanic(differentConveySituations, entry.Situation)
+ }
+ } else {
+ if _, ok := ctx.children[entry.Situation]; ok {
+ conveyPanic(multipleIdenticalConvey, entry.Situation)
+ }
+ inner_ctx = &context{
+ reporter: ctx.reporter,
+
+ children: make(map[string]*context),
+
+ expectChildRun: ctx.expectChildRun,
+
+ focus: entry.Focus,
+ failureMode: ctx.failureMode.combine(entry.FailMode),
+ }
+ ctx.children[entry.Situation] = inner_ctx
+ }
+
+ if inner_ctx.shouldVisit() {
+ ctxMgr.SetValues(gls.Values{nodeKey: inner_ctx}, func() {
+ inner_ctx.conveyInner(entry.Situation, entry.Func)
+ })
+ }
+}
+
+func (ctx *context) SkipSo(stuff ...interface{}) {
+ ctx.assertionReport(reporting.NewSkipReport())
+}
+
+func (ctx *context) So(actual interface{}, assert assertion, expected ...interface{}) {
+ if result := assert(actual, expected...); result == assertionSuccess {
+ ctx.assertionReport(reporting.NewSuccessReport())
+ } else {
+ ctx.assertionReport(reporting.NewFailureReport(result))
+ }
+}
+
+func (ctx *context) Reset(action func()) {
+ /* TODO: Failure mode configuration */
+ ctx.resets = append(ctx.resets, action)
+}
+
+func (ctx *context) Print(items ...interface{}) (int, error) {
+ fmt.Fprint(ctx.reporter, items...)
+ return fmt.Print(items...)
+}
+
+func (ctx *context) Println(items ...interface{}) (int, error) {
+ fmt.Fprintln(ctx.reporter, items...)
+ return fmt.Println(items...)
+}
+
+func (ctx *context) Printf(format string, items ...interface{}) (int, error) {
+ fmt.Fprintf(ctx.reporter, format, items...)
+ return fmt.Printf(format, items...)
+}
+
+//////////////////////////////////// Private ////////////////////////////////////
+
+// shouldVisit returns true iff we should traverse down into a Convey. Note
+// that just because we don't traverse a Convey this time, doesn't mean that
+// we may not traverse it on a subsequent pass.
+func (c *context) shouldVisit() bool {
+ return !c.complete && *c.expectChildRun
+}
+
+// conveyInner is the function which actually executes the user's anonymous test
+// function body. At this point, Convey or RootConvey has decided that this
+// function should actually run.
+func (ctx *context) conveyInner(situation string, f func(C)) {
+ // Record/Reset state for next time.
+ defer func() {
+ ctx.executedOnce = true
+
+ // This is only needed at the leaves, but there's no harm in also setting it
+ // when returning from branch Conveys.
+ *ctx.expectChildRun = false
+ }()
+
+ // Set up+tear down our scope for the reporter
+ ctx.reporter.Enter(reporting.NewScopeReport(situation))
+ defer ctx.reporter.Exit()
+
+ // Recover from any panics in f, and assign the `complete` status for this
+ // node of the tree.
+ defer func() {
+ ctx.complete = true
+ if problem := recover(); problem != nil {
+ if problem, ok := problem.(*conveyErr); ok {
+ panic(problem)
+ }
+ if problem != failureHalt {
+ ctx.reporter.Report(reporting.NewErrorReport(problem))
+ }
+ } else {
+ for _, child := range ctx.children {
+ if !child.complete {
+ ctx.complete = false
+ return
+ }
+ }
+ }
+ }()
+
+ // Resets are registered as the `f` function executes, so nil them here.
+ // All resets are run in registration order (FIFO).
+ ctx.resets = []func(){}
+ defer func() {
+ for _, r := range ctx.resets {
+ // panics handled by the previous defer
+ r()
+ }
+ }()
+
+ if f == nil {
+ // if f is nil, this was either a Convey(..., nil), or a SkipConvey
+ ctx.reporter.Report(reporting.NewSkipReport())
+ } else {
+ f(ctx)
+ }
+}
+
+// assertionReport is a helper for So and SkipSo which makes the report and
+// then possibly panics, depending on the current context's failureMode.
+func (ctx *context) assertionReport(r *reporting.AssertionResult) {
+ ctx.reporter.Report(r)
+ if r.Failure != "" && ctx.failureMode == FailureHalts {
+ panic(failureHalt)
+ }
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/convey.goconvey b/backend/vendor/github.com/smartystreets/goconvey/convey/convey.goconvey
new file mode 100644
index 00000000..a2d9327d
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/convey.goconvey
@@ -0,0 +1,4 @@
+#ignore
+-timeout=1s
+#-covermode=count
+#-coverpkg=github.com/smartystreets/goconvey/convey,github.com/smartystreets/goconvey/convey/gotest,github.com/smartystreets/goconvey/convey/reporting
\ No newline at end of file
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/discovery.go b/backend/vendor/github.com/smartystreets/goconvey/convey/discovery.go
new file mode 100644
index 00000000..eb8d4cb2
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/discovery.go
@@ -0,0 +1,103 @@
+package convey
+
+type actionSpecifier uint8
+
+const (
+ noSpecifier actionSpecifier = iota
+ skipConvey
+ focusConvey
+)
+
+type suite struct {
+ Situation string
+ Test t
+ Focus bool
+ Func func(C) // nil means skipped
+ FailMode FailureMode
+}
+
+func newSuite(situation string, failureMode FailureMode, f func(C), test t, specifier actionSpecifier) *suite {
+ ret := &suite{
+ Situation: situation,
+ Test: test,
+ Func: f,
+ FailMode: failureMode,
+ }
+ switch specifier {
+ case skipConvey:
+ ret.Func = nil
+ case focusConvey:
+ ret.Focus = true
+ }
+ return ret
+}
+
+func discover(items []interface{}) *suite {
+ name, items := parseName(items)
+ test, items := parseGoTest(items)
+ failure, items := parseFailureMode(items)
+ action, items := parseAction(items)
+ specifier, items := parseSpecifier(items)
+
+ if len(items) != 0 {
+ conveyPanic(parseError)
+ }
+
+ return newSuite(name, failure, action, test, specifier)
+}
+func item(items []interface{}) interface{} {
+ if len(items) == 0 {
+ conveyPanic(parseError)
+ }
+ return items[0]
+}
+func parseName(items []interface{}) (string, []interface{}) {
+ if name, parsed := item(items).(string); parsed {
+ return name, items[1:]
+ }
+ conveyPanic(parseError)
+ panic("never get here")
+}
+func parseGoTest(items []interface{}) (t, []interface{}) {
+ if test, parsed := item(items).(t); parsed {
+ return test, items[1:]
+ }
+ return nil, items
+}
+func parseFailureMode(items []interface{}) (FailureMode, []interface{}) {
+ if mode, parsed := item(items).(FailureMode); parsed {
+ return mode, items[1:]
+ }
+ return FailureInherits, items
+}
+func parseAction(items []interface{}) (func(C), []interface{}) {
+ switch x := item(items).(type) {
+ case nil:
+ return nil, items[1:]
+ case func(C):
+ return x, items[1:]
+ case func():
+ return func(C) { x() }, items[1:]
+ }
+ conveyPanic(parseError)
+ panic("never get here")
+}
+func parseSpecifier(items []interface{}) (actionSpecifier, []interface{}) {
+ if len(items) == 0 {
+ return noSpecifier, items
+ }
+ if spec, ok := items[0].(actionSpecifier); ok {
+ return spec, items[1:]
+ }
+ conveyPanic(parseError)
+ panic("never get here")
+}
+
+// This interface allows us to pass the *testing.T struct
+// throughout the internals of this package without ever
+// having to import the "testing" package.
+type t interface {
+ Fail()
+}
+
+const parseError = "You must provide a name (string), then a *testing.T (if in outermost scope), an optional FailureMode, and then an action (func())."
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/doc.go b/backend/vendor/github.com/smartystreets/goconvey/convey/doc.go
new file mode 100644
index 00000000..e4f7b51a
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/doc.go
@@ -0,0 +1,218 @@
+// Package convey contains all of the public-facing entry points to this project.
+// This means that it should never be required of the user to import any other
+// packages from this project as they serve internal purposes.
+package convey
+
+import "github.com/smartystreets/goconvey/convey/reporting"
+
+////////////////////////////////// suite //////////////////////////////////
+
+// C is the Convey context which you can optionally obtain in your action
+// by calling Convey like:
+//
+// Convey(..., func(c C) {
+// ...
+// })
+//
+// See the documentation on Convey for more details.
+//
+// All methods in this context behave identically to the global functions of the
+// same name in this package.
+type C interface {
+ Convey(items ...interface{})
+ SkipConvey(items ...interface{})
+ FocusConvey(items ...interface{})
+
+ So(actual interface{}, assert assertion, expected ...interface{})
+ SkipSo(stuff ...interface{})
+
+ Reset(action func())
+
+ Println(items ...interface{}) (int, error)
+ Print(items ...interface{}) (int, error)
+ Printf(format string, items ...interface{}) (int, error)
+}
+
+// Convey is the method intended for use when declaring the scopes of
+// a specification. Each scope has a description and a func() which may contain
+// other calls to Convey(), Reset() or Should-style assertions. Convey calls can
+// be nested as far as you see fit.
+//
+// IMPORTANT NOTE: The top-level Convey() within a Test method
+// must conform to the following signature:
+//
+// Convey(description string, t *testing.T, action func())
+//
+// All other calls should look like this (no need to pass in *testing.T):
+//
+// Convey(description string, action func())
+//
+// Don't worry, goconvey will panic if you get it wrong so you can fix it.
+//
+// Additionally, you may explicitly obtain access to the Convey context by doing:
+//
+// Convey(description string, action func(c C))
+//
+// You may need to do this if you want to pass the context through to a
+// goroutine, or to close over the context in a handler to a library which
+// calls your handler in a goroutine (httptest comes to mind).
+//
+// All Convey()-blocks also accept an optional parameter of FailureMode which sets
+// how goconvey should treat failures for So()-assertions in the block and
+// nested blocks. See the constants in this file for the available options.
+//
+// By default it will inherit from its parent block and the top-level blocks
+// default to the FailureHalts setting.
+//
+// This parameter is inserted before the block itself:
+//
+// Convey(description string, t *testing.T, mode FailureMode, action func())
+// Convey(description string, mode FailureMode, action func())
+//
+// See the examples package for, well, examples.
+func Convey(items ...interface{}) {
+ if ctx := getCurrentContext(); ctx == nil {
+ rootConvey(items...)
+ } else {
+ ctx.Convey(items...)
+ }
+}
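+
+// NOTE: a minimal sketch (not from the upstream source) of the nesting shape
+// described above, in a hypothetical test file:
+//
+//	func TestIntegerStuff(t *testing.T) {
+//		Convey("Given some integer with a starting value", t, func() {
+//			x := 1
+//
+//			Convey("When the integer is incremented", func() {
+//				x++
+//
+//				Convey("The value should be greater by one", func() {
+//					So(x, ShouldEqual, 2)
+//				})
+//			})
+//		})
+//	}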
+
+// SkipConvey is analogous to Convey except that the scope is not executed
+// (which means that child scopes defined within this scope are not run either).
+// The reporter will be notified that this step was skipped.
+func SkipConvey(items ...interface{}) {
+ Convey(append(items, skipConvey)...)
+}
+
+// FocusConvey has the inverse effect of SkipConvey. If the top-level
+// Convey is changed to `FocusConvey`, only nested scopes that are defined
+// with FocusConvey will be run; the rest will be ignored completely. This
+// is handy when debugging a large suite in which a misbehaving function
+// runs repeatedly: you can isolate a single code path through the suite
+// without swaths of `SkipConvey` calls, using just a targeted chain of
+// calls to FocusConvey.
+func FocusConvey(items ...interface{}) {
+ Convey(append(items, focusConvey)...)
+}
+
+// Reset registers a cleanup function to be run after each Convey()
+// in the same scope. See the examples package for a simple use case.
+func Reset(action func()) {
+ mustGetCurrentContext().Reset(action)
+}
+
+/////////////////////////////////// Assertions ///////////////////////////////////
+
+// assertion is an alias for a function with a signature that the convey.So()
+// method can handle. Any future or custom assertions should conform to this
+// method signature. The return value should be an empty string if the assertion
+// passes and a well-formed failure message if not.
+type assertion func(actual interface{}, expected ...interface{}) string
+
+const assertionSuccess = ""
+
+// So is the means by which assertions are made against the system under test.
+// The majority of exported names in the assertions package begin with the word
+// 'Should' and describe how the first argument (actual) should compare with any
+// of the final (expected) arguments. How many final arguments are accepted
+// depends on the particular assertion that is passed in as the assert argument.
+// See the examples package for use cases and the assertions package for
+// documentation on specific assertion methods. A failing assertion will
+// cause t.Fail() to be invoked--you should never call this method (or other
+// failure-inducing methods) in your test code. Leave that to GoConvey.
+func So(actual interface{}, assert assertion, expected ...interface{}) {
+ mustGetCurrentContext().So(actual, assert, expected...)
+}
+
+// SkipSo is analogous to So except that the assertion that would have been passed
+// to So is not executed and the reporter is notified that the assertion was skipped.
+func SkipSo(stuff ...interface{}) {
+ mustGetCurrentContext().SkipSo()
+}
+
+// FailureMode is a type which determines how the So() blocks should fail
+// if their assertion fails. See constants further down for acceptable values
+type FailureMode string
+
+const (
+
+ // FailureContinues is a failure mode which prevents failing
+ // So()-assertions from halting Convey-block execution, instead
+ // allowing the test to continue past failing So()-assertions.
+ FailureContinues FailureMode = "continue"
+
+ // FailureHalts is the default setting for a top-level Convey()-block
+ // and will cause all failing So()-assertions to halt further execution
+ // in that test-arm and continue on to the next arm.
+ FailureHalts FailureMode = "halt"
+
+ // FailureInherits is the default setting for failure-mode; it defaults
+ // to the failure-mode of the parent block. You should never need to
+ // specify this mode in your tests.
+ FailureInherits FailureMode = "inherits"
+)
+
+func (f FailureMode) combine(other FailureMode) FailureMode {
+ if other == FailureInherits {
+ return f
+ }
+ return other
+}
+
+var defaultFailureMode FailureMode = FailureHalts
+
+// SetDefaultFailureMode allows you to specify the default failure mode
+// for all Convey blocks. It is meant to be used in an init function to
+// allow the default mode to be changed across all tests for an entire package,
+// but it can be used anywhere.
+func SetDefaultFailureMode(mode FailureMode) {
+ if mode == FailureContinues || mode == FailureHalts {
+ defaultFailureMode = mode
+ } else {
+ panic("You may only use the constants named 'FailureContinues' and 'FailureHalts' as default failure modes.")
+ }
+}
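+
+// NOTE: an illustrative sketch (not from the upstream source); the call is
+// typically made from an init function in the test package:
+//
+//	func init() {
+//		// Keep executing a Convey block even after a So() assertion fails.
+//		SetDefaultFailureMode(FailureContinues)
+//	}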
+
+//////////////////////////////////// Print functions ////////////////////////////////////
+
+// Print is analogous to fmt.Print (and it even calls fmt.Print). It ensures that
+// output is aligned with the corresponding scopes in the web UI.
+func Print(items ...interface{}) (written int, err error) {
+ return mustGetCurrentContext().Print(items...)
+}
+
+// Println is analogous to fmt.Println (and it even calls fmt.Println). It ensures that
+// output is aligned with the corresponding scopes in the web UI.
+func Println(items ...interface{}) (written int, err error) {
+ return mustGetCurrentContext().Println(items...)
+}
+
+// Printf is analogous to fmt.Printf (and it even calls fmt.Printf). It ensures that
+// output is aligned with the corresponding scopes in the web UI.
+func Printf(format string, items ...interface{}) (written int, err error) {
+ return mustGetCurrentContext().Printf(format, items...)
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// SuppressConsoleStatistics prevents automatic printing of console statistics.
+// Calling PrintConsoleStatistics explicitly will force printing of statistics.
+func SuppressConsoleStatistics() {
+ reporting.SuppressConsoleStatistics()
+}
+
+// PrintConsoleStatistics may be called at any time to print assertion statistics.
+// Generally, the best place to do this would be in a TestMain function,
+// after all tests have been run. Something like this:
+//
+// func TestMain(m *testing.M) {
+// convey.SuppressConsoleStatistics()
+// result := m.Run()
+// convey.PrintConsoleStatistics()
+// os.Exit(result)
+// }
+//
+func PrintConsoleStatistics() {
+ reporting.PrintConsoleStatistics()
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go b/backend/vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go
new file mode 100644
index 00000000..167c8fb7
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go
@@ -0,0 +1,28 @@
+// Package gotest contains internal functionality. Although this package
+// contains one or more exported names it is not intended for public
+// consumption. See the examples package for how to use this project.
+package gotest
+
+import (
+ "runtime"
+ "strings"
+)
+
+func ResolveExternalCaller() (file string, line int, name string) {
+ var caller_id uintptr
+ callers := runtime.Callers(0, callStack)
+
+ for x := 0; x < callers; x++ {
+ caller_id, file, line, _ = runtime.Caller(x)
+ if strings.HasSuffix(file, "_test.go") || strings.HasSuffix(file, "_tests.go") {
+ name = runtime.FuncForPC(caller_id).Name()
+ return
+ }
+ }
+ file, line, name = "", -1, ""
+ return // panic?
+}
+
+const maxStackDepth = 100 // This had better be enough...
+
+var callStack []uintptr = make([]uintptr, maxStackDepth, maxStackDepth)
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/init.go b/backend/vendor/github.com/smartystreets/goconvey/convey/init.go
new file mode 100644
index 00000000..cb930a0d
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/init.go
@@ -0,0 +1,81 @@
+package convey
+
+import (
+ "flag"
+ "os"
+
+ "github.com/jtolds/gls"
+ "github.com/smartystreets/assertions"
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+func init() {
+ assertions.GoConveyMode(true)
+
+ declareFlags()
+
+ ctxMgr = gls.NewContextManager()
+}
+
+func declareFlags() {
+ flag.BoolVar(&json, "convey-json", false, "When true, emits results in JSON blocks. Default: 'false'")
+ flag.BoolVar(&silent, "convey-silent", false, "When true, all output from GoConvey is suppressed.")
+ flag.BoolVar(&story, "convey-story", false, "When true, emits story output, otherwise emits dot output. When not provided, this flag mirrors the value of the '-test.v' flag")
+
+ if noStoryFlagProvided() {
+ story = verboseEnabled
+ }
+
+ // FYI: flag.Parse() is called from the testing package.
+}
+
+func noStoryFlagProvided() bool {
+ return !story && !storyDisabled
+}
+
+func buildReporter() reporting.Reporter {
+ selectReporter := os.Getenv("GOCONVEY_REPORTER")
+
+ switch {
+ case testReporter != nil:
+ return testReporter
+ case json || selectReporter == "json":
+ return reporting.BuildJsonReporter()
+ case silent || selectReporter == "silent":
+ return reporting.BuildSilentReporter()
+ case selectReporter == "dot":
+ // Story is turned on when verbose is set, so we need to check for dot reporter first.
+ return reporting.BuildDotReporter()
+ case story || selectReporter == "story":
+ return reporting.BuildStoryReporter()
+ default:
+ return reporting.BuildDotReporter()
+ }
+}
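+
+// NOTE: an illustrative sketch (not from the upstream source) of selecting a
+// reporter, using the flags and environment variable handled above:
+//
+//	GOCONVEY_REPORTER=json go test ./...
+//	go test -convey-story ./...
+//	go test -convey-silent ./...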
+
+var (
+ ctxMgr *gls.ContextManager
+
+ // only set by internal tests
+ testReporter reporting.Reporter
+)
+
+var (
+ json bool
+ silent bool
+ story bool
+
+ verboseEnabled = flagFound("-test.v=true")
+ storyDisabled = flagFound("-story=false")
+)
+
+// flagFound parses the command line args manually for flags defined in other
+// packages. Like the '-v' flag from the "testing" package, for instance.
+func flagFound(flagValue string) bool {
+ for _, arg := range os.Args {
+ if arg == flagValue {
+ return true
+ }
+ }
+ return false
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/nilReporter.go b/backend/vendor/github.com/smartystreets/goconvey/convey/nilReporter.go
new file mode 100644
index 00000000..777b2a51
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/nilReporter.go
@@ -0,0 +1,15 @@
+package convey
+
+import (
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+type nilReporter struct{}
+
+func (self *nilReporter) BeginStory(story *reporting.StoryReport) {}
+func (self *nilReporter) Enter(scope *reporting.ScopeReport) {}
+func (self *nilReporter) Report(report *reporting.AssertionResult) {}
+func (self *nilReporter) Exit() {}
+func (self *nilReporter) EndStory() {}
+func (self *nilReporter) Write(p []byte) (int, error) { return len(p), nil }
+func newNilReporter() *nilReporter { return &nilReporter{} }
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/console.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/console.go
new file mode 100644
index 00000000..7bf67dbb
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/console.go
@@ -0,0 +1,16 @@
+package reporting
+
+import (
+ "fmt"
+ "io"
+)
+
+type console struct{}
+
+func (self *console) Write(p []byte) (n int, err error) {
+ return fmt.Print(string(p))
+}
+
+func NewConsole() io.Writer {
+ return new(console)
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go
new file mode 100644
index 00000000..a37d0019
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go
@@ -0,0 +1,5 @@
+// Package reporting contains internal functionality related
+// to console reporting and output. Although this package has
+// exported names, it is not intended for public consumption. See the
+// examples package for how to use this project.
+package reporting
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go
new file mode 100644
index 00000000..47d57c6b
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go
@@ -0,0 +1,40 @@
+package reporting
+
+import "fmt"
+
+type dot struct{ out *Printer }
+
+func (self *dot) BeginStory(story *StoryReport) {}
+
+func (self *dot) Enter(scope *ScopeReport) {}
+
+func (self *dot) Report(report *AssertionResult) {
+ if report.Error != nil {
+ fmt.Print(redColor)
+ self.out.Insert(dotError)
+ } else if report.Failure != "" {
+ fmt.Print(yellowColor)
+ self.out.Insert(dotFailure)
+ } else if report.Skipped {
+ fmt.Print(yellowColor)
+ self.out.Insert(dotSkip)
+ } else {
+ fmt.Print(greenColor)
+ self.out.Insert(dotSuccess)
+ }
+ fmt.Print(resetColor)
+}
+
+func (self *dot) Exit() {}
+
+func (self *dot) EndStory() {}
+
+func (self *dot) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewDotReporter(out *Printer) *dot {
+ self := new(dot)
+ self.out = out
+ return self
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go
new file mode 100644
index 00000000..c396e16b
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go
@@ -0,0 +1,33 @@
+package reporting
+
+type gotestReporter struct{ test T }
+
+func (self *gotestReporter) BeginStory(story *StoryReport) {
+ self.test = story.Test
+}
+
+func (self *gotestReporter) Enter(scope *ScopeReport) {}
+
+func (self *gotestReporter) Report(r *AssertionResult) {
+ if !passed(r) {
+ self.test.Fail()
+ }
+}
+
+func (self *gotestReporter) Exit() {}
+
+func (self *gotestReporter) EndStory() {
+ self.test = nil
+}
+
+func (self *gotestReporter) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewGoTestReporter() *gotestReporter {
+ return new(gotestReporter)
+}
+
+func passed(r *AssertionResult) bool {
+ return r.Error == nil && r.Failure == ""
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/init.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/init.go
new file mode 100644
index 00000000..99c3bd6d
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/init.go
@@ -0,0 +1,94 @@
+package reporting
+
+import (
+ "os"
+ "runtime"
+ "strings"
+)
+
+func init() {
+ if !isColorableTerminal() {
+ monochrome()
+ }
+
+ if runtime.GOOS == "windows" {
+ success, failure, error_ = dotSuccess, dotFailure, dotError
+ }
+}
+
+func BuildJsonReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewJsonReporter(out))
+}
+func BuildDotReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewDotReporter(out),
+ NewProblemReporter(out),
+ consoleStatistics)
+}
+func BuildStoryReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewStoryReporter(out),
+ NewProblemReporter(out),
+ consoleStatistics)
+}
+func BuildSilentReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewSilentProblemReporter(out))
+}
+
+var (
+ newline = "\n"
+ success = "✔"
+ failure = "✘"
+ error_ = "🔥"
+ skip = "⚠"
+ dotSuccess = "."
+ dotFailure = "x"
+ dotError = "E"
+ dotSkip = "S"
+ errorTemplate = "* %s \nLine %d: - %v \n%s\n"
+ failureTemplate = "* %s \nLine %d:\n%s\n%s\n"
+)
+
+var (
+ greenColor = "\033[32m"
+ yellowColor = "\033[33m"
+ redColor = "\033[31m"
+ resetColor = "\033[0m"
+)
+
+var consoleStatistics = NewStatisticsReporter(NewPrinter(NewConsole()))
+
+func SuppressConsoleStatistics() { consoleStatistics.Suppress() }
+func PrintConsoleStatistics() { consoleStatistics.PrintSummary() }
+
+// QuietMode disables all console output symbols. This is only meant to be used
+// for tests that are internal to goconvey where the output is distracting or
+// otherwise not needed in the test output.
+func QuietMode() {
+ success, failure, error_, skip, dotSuccess, dotFailure, dotError, dotSkip = "", "", "", "", "", "", "", ""
+}
+
+func monochrome() {
+ greenColor, yellowColor, redColor, resetColor = "", "", "", ""
+}
+
+func isColorableTerminal() bool {
+ return strings.Contains(os.Getenv("TERM"), "color")
+}
+
+// This interface allows us to pass the *testing.T struct
+// throughout the internals of this tool without ever
+// having to import the "testing" package.
+type T interface {
+ Fail()
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/json.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/json.go
new file mode 100644
index 00000000..f8526979
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/json.go
@@ -0,0 +1,88 @@
+// TODO: under unit test
+
+package reporting
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "strings"
+)
+
+type JsonReporter struct {
+ out *Printer
+ currentKey []string
+ current *ScopeResult
+ index map[string]*ScopeResult
+ scopes []*ScopeResult
+}
+
+func (self *JsonReporter) depth() int { return len(self.currentKey) }
+
+func (self *JsonReporter) BeginStory(story *StoryReport) {}
+
+func (self *JsonReporter) Enter(scope *ScopeReport) {
+ self.currentKey = append(self.currentKey, scope.Title)
+ ID := strings.Join(self.currentKey, "|")
+ if _, found := self.index[ID]; !found {
+ next := newScopeResult(scope.Title, self.depth(), scope.File, scope.Line)
+ self.scopes = append(self.scopes, next)
+ self.index[ID] = next
+ }
+ self.current = self.index[ID]
+}
+
+func (self *JsonReporter) Report(report *AssertionResult) {
+ self.current.Assertions = append(self.current.Assertions, report)
+}
+
+func (self *JsonReporter) Exit() {
+ self.currentKey = self.currentKey[:len(self.currentKey)-1]
+}
+
+func (self *JsonReporter) EndStory() {
+ self.report()
+ self.reset()
+}
+func (self *JsonReporter) report() {
+ scopes := []string{}
+ for _, scope := range self.scopes {
+ serialized, err := json.Marshal(scope)
+ if err != nil {
+ self.out.Println(jsonMarshalFailure)
+ panic(err)
+ }
+ var buffer bytes.Buffer
+ json.Indent(&buffer, serialized, "", " ")
+ scopes = append(scopes, buffer.String())
+ }
+ self.out.Print(fmt.Sprintf("%s\n%s,\n%s\n", OpenJson, strings.Join(scopes, ","), CloseJson))
+}
+func (self *JsonReporter) reset() {
+ self.scopes = []*ScopeResult{}
+ self.index = map[string]*ScopeResult{}
+ self.currentKey = nil
+}
+
+func (self *JsonReporter) Write(content []byte) (written int, err error) {
+ self.current.Output += string(content)
+ return len(content), nil
+}
+
+func NewJsonReporter(out *Printer) *JsonReporter {
+ self := new(JsonReporter)
+ self.out = out
+ self.reset()
+ return self
+}
+
+const OpenJson = ">->->OPEN-JSON->->->" // "⌦"
+const CloseJson = "<-<-<-CLOSE-JSON<-<-<" // "⌫"
+const jsonMarshalFailure = `
+
+GOCONVEY_JSON_MARSHALL_FAILURE: There was an error when attempting to convert test results to JSON.
+Please file a bug report and reference the code that caused this failure if possible.
+
+Here's the panic:
+
+`
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go
new file mode 100644
index 00000000..3dac0d4d
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go
@@ -0,0 +1,60 @@
+package reporting
+
+import (
+ "fmt"
+ "io"
+ "strings"
+)
+
+type Printer struct {
+ out io.Writer
+ prefix string
+}
+
+func (self *Printer) Println(message string, values ...interface{}) {
+ formatted := self.format(message, values...) + newline
+ self.out.Write([]byte(formatted))
+}
+
+func (self *Printer) Print(message string, values ...interface{}) {
+ formatted := self.format(message, values...)
+ self.out.Write([]byte(formatted))
+}
+
+func (self *Printer) Insert(text string) {
+ self.out.Write([]byte(text))
+}
+
+func (self *Printer) format(message string, values ...interface{}) string {
+ var formatted string
+ if len(values) == 0 {
+ formatted = self.prefix + message
+ } else {
+ formatted = self.prefix + fmt_Sprintf(message, values...)
+ }
+ indented := strings.Replace(formatted, newline, newline+self.prefix, -1)
+ return strings.TrimRight(indented, space)
+}
+
+// Extracting fmt.Sprintf to a separate variable circumvents go vet, which, as of go 1.10, is run with go test.
+var fmt_Sprintf = fmt.Sprintf
+
+func (self *Printer) Indent() {
+ self.prefix += pad
+}
+
+func (self *Printer) Dedent() {
+ if len(self.prefix) >= padLength {
+ self.prefix = self.prefix[:len(self.prefix)-padLength]
+ }
+}
+
+func NewPrinter(out io.Writer) *Printer {
+ self := new(Printer)
+ self.out = out
+ return self
+}
+
+const space = " "
+const pad = space + space
+const padLength = len(pad)
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go
new file mode 100644
index 00000000..33d5e147
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go
@@ -0,0 +1,80 @@
+package reporting
+
+import "fmt"
+
+type problem struct {
+ silent bool
+ out *Printer
+ errors []*AssertionResult
+ failures []*AssertionResult
+}
+
+func (self *problem) BeginStory(story *StoryReport) {}
+
+func (self *problem) Enter(scope *ScopeReport) {}
+
+func (self *problem) Report(report *AssertionResult) {
+ if report.Error != nil {
+ self.errors = append(self.errors, report)
+ } else if report.Failure != "" {
+ self.failures = append(self.failures, report)
+ }
+}
+
+func (self *problem) Exit() {}
+
+func (self *problem) EndStory() {
+ self.show(self.showErrors, redColor)
+ self.show(self.showFailures, yellowColor)
+ self.prepareForNextStory()
+}
+func (self *problem) show(display func(), color string) {
+ if !self.silent {
+ fmt.Print(color)
+ }
+ display()
+ if !self.silent {
+ fmt.Print(resetColor)
+ }
+ self.out.Dedent()
+}
+func (self *problem) showErrors() {
+ for i, e := range self.errors {
+ if i == 0 {
+ self.out.Println("\nErrors:\n")
+ self.out.Indent()
+ }
+ self.out.Println(errorTemplate, e.File, e.Line, e.Error, e.StackTrace)
+ }
+}
+func (self *problem) showFailures() {
+ for i, f := range self.failures {
+ if i == 0 {
+ self.out.Println("\nFailures:\n")
+ self.out.Indent()
+ }
+ self.out.Println(failureTemplate, f.File, f.Line, f.Failure, f.StackTrace)
+ }
+}
+
+func (self *problem) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewProblemReporter(out *Printer) *problem {
+ self := new(problem)
+ self.out = out
+ self.prepareForNextStory()
+ return self
+}
+
+func NewSilentProblemReporter(out *Printer) *problem {
+ self := NewProblemReporter(out)
+ self.silent = true
+ return self
+}
+
+func (self *problem) prepareForNextStory() {
+ self.errors = []*AssertionResult{}
+ self.failures = []*AssertionResult{}
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go
new file mode 100644
index 00000000..cce6c5e4
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go
@@ -0,0 +1,39 @@
+package reporting
+
+import "io"
+
+type Reporter interface {
+ BeginStory(story *StoryReport)
+ Enter(scope *ScopeReport)
+ Report(r *AssertionResult)
+ Exit()
+ EndStory()
+ io.Writer
+}
+
+type reporters struct{ collection []Reporter }
+
+func (self *reporters) BeginStory(s *StoryReport) { self.foreach(func(r Reporter) { r.BeginStory(s) }) }
+func (self *reporters) Enter(s *ScopeReport) { self.foreach(func(r Reporter) { r.Enter(s) }) }
+func (self *reporters) Report(a *AssertionResult) { self.foreach(func(r Reporter) { r.Report(a) }) }
+func (self *reporters) Exit() { self.foreach(func(r Reporter) { r.Exit() }) }
+func (self *reporters) EndStory() { self.foreach(func(r Reporter) { r.EndStory() }) }
+
+func (self *reporters) Write(contents []byte) (written int, err error) {
+ self.foreach(func(r Reporter) {
+ written, err = r.Write(contents)
+ })
+ return written, err
+}
+
+func (self *reporters) foreach(action func(Reporter)) {
+ for _, r := range self.collection {
+ action(r)
+ }
+}
+
+func NewReporters(collection ...Reporter) *reporters {
+ self := new(reporters)
+ self.collection = collection
+ return self
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey
new file mode 100644
index 00000000..79982854
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey
@@ -0,0 +1,2 @@
+#ignore
+-timeout=1s
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go
new file mode 100644
index 00000000..712e6ade
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go
@@ -0,0 +1,179 @@
+package reporting
+
+import (
+ "encoding/json"
+ "fmt"
+ "runtime"
+ "strings"
+
+ "github.com/smartystreets/goconvey/convey/gotest"
+)
+
+////////////////// ScopeReport ////////////////////
+
+type ScopeReport struct {
+ Title string
+ File string
+ Line int
+}
+
+func NewScopeReport(title string) *ScopeReport {
+ file, line, _ := gotest.ResolveExternalCaller()
+ self := new(ScopeReport)
+ self.Title = title
+ self.File = file
+ self.Line = line
+ return self
+}
+
+////////////////// ScopeResult ////////////////////
+
+type ScopeResult struct {
+ Title string
+ File string
+ Line int
+ Depth int
+ Assertions []*AssertionResult
+ Output string
+}
+
+func newScopeResult(title string, depth int, file string, line int) *ScopeResult {
+ self := new(ScopeResult)
+ self.Title = title
+ self.Depth = depth
+ self.File = file
+ self.Line = line
+ self.Assertions = []*AssertionResult{}
+ return self
+}
+
+/////////////////// StoryReport /////////////////////
+
+type StoryReport struct {
+ Test T
+ Name string
+ File string
+ Line int
+}
+
+func NewStoryReport(test T) *StoryReport {
+ file, line, name := gotest.ResolveExternalCaller()
+ name = removePackagePath(name)
+ self := new(StoryReport)
+ self.Test = test
+ self.Name = name
+ self.File = file
+ self.Line = line
+ return self
+}
+
+// name comes in looking like "github.com/smartystreets/goconvey/examples.TestName".
+// We only want the stuff after the last '.', which is the name of the test function.
+func removePackagePath(name string) string {
+ parts := strings.Split(name, ".")
+ return parts[len(parts)-1]
+}
+
+/////////////////// FailureView ////////////////////////
+
+// This struct is also declared in github.com/smartystreets/assertions.
+// The json struct tags should be equal in both declarations.
+type FailureView struct {
+ Message string `json:"Message"`
+ Expected string `json:"Expected"`
+ Actual string `json:"Actual"`
+}
+
+////////////////////AssertionResult //////////////////////
+
+type AssertionResult struct {
+ File string
+ Line int
+ Expected string
+ Actual string
+ Failure string
+ Error interface{}
+ StackTrace string
+ Skipped bool
+}
+
+func NewFailureReport(failure string) *AssertionResult {
+ report := new(AssertionResult)
+ report.File, report.Line = caller()
+ report.StackTrace = stackTrace()
+ parseFailure(failure, report)
+ return report
+}
+func parseFailure(failure string, report *AssertionResult) {
+ view := new(FailureView)
+ err := json.Unmarshal([]byte(failure), view)
+ if err == nil {
+ report.Failure = view.Message
+ report.Expected = view.Expected
+ report.Actual = view.Actual
+ } else {
+ report.Failure = failure
+ }
+}
+func NewErrorReport(err interface{}) *AssertionResult {
+ report := new(AssertionResult)
+ report.File, report.Line = caller()
+ report.StackTrace = fullStackTrace()
+ report.Error = fmt.Sprintf("%v", err)
+ return report
+}
+func NewSuccessReport() *AssertionResult {
+ return new(AssertionResult)
+}
+func NewSkipReport() *AssertionResult {
+ report := new(AssertionResult)
+ report.File, report.Line = caller()
+ report.StackTrace = fullStackTrace()
+ report.Skipped = true
+ return report
+}
+
+func caller() (file string, line int) {
+ file, line, _ = gotest.ResolveExternalCaller()
+ return
+}
+
+func stackTrace() string {
+ buffer := make([]byte, 1024*64)
+ n := runtime.Stack(buffer, false)
+ return removeInternalEntries(string(buffer[:n]))
+}
+func fullStackTrace() string {
+ buffer := make([]byte, 1024*64)
+ n := runtime.Stack(buffer, true)
+ return removeInternalEntries(string(buffer[:n]))
+}
+func removeInternalEntries(stack string) string {
+ lines := strings.Split(stack, newline)
+ filtered := []string{}
+ for _, line := range lines {
+ if !isExternal(line) {
+ filtered = append(filtered, line)
+ }
+ }
+ return strings.Join(filtered, newline)
+}
+func isExternal(line string) bool {
+ for _, p := range internalPackages {
+ if strings.Contains(line, p) {
+ return true
+ }
+ }
+ return false
+}
+
+// NOTE: any new packages that host goconvey packages will need to be added here!
+// An alternative is to scan the goconvey directory and then exclude stuff like
+// the examples package but that's nasty too.
+var internalPackages = []string{
+ "goconvey/assertions",
+ "goconvey/convey",
+ "goconvey/execution",
+ "goconvey/gotest",
+ "goconvey/reporting",
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go
new file mode 100644
index 00000000..c3ccd056
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go
@@ -0,0 +1,108 @@
+package reporting
+
+import (
+ "fmt"
+ "sync"
+)
+
+func (self *statistics) BeginStory(story *StoryReport) {}
+
+func (self *statistics) Enter(scope *ScopeReport) {}
+
+func (self *statistics) Report(report *AssertionResult) {
+ self.Lock()
+ defer self.Unlock()
+
+ if !self.failing && report.Failure != "" {
+ self.failing = true
+ }
+ if !self.erroring && report.Error != nil {
+ self.erroring = true
+ }
+ if report.Skipped {
+ self.skipped += 1
+ } else {
+ self.total++
+ }
+}
+
+func (self *statistics) Exit() {}
+
+func (self *statistics) EndStory() {
+ self.Lock()
+ defer self.Unlock()
+
+ if !self.suppressed {
+ self.printSummaryLocked()
+ }
+}
+
+func (self *statistics) Suppress() {
+ self.Lock()
+ defer self.Unlock()
+ self.suppressed = true
+}
+
+func (self *statistics) PrintSummary() {
+ self.Lock()
+ defer self.Unlock()
+ self.printSummaryLocked()
+}
+
+func (self *statistics) printSummaryLocked() {
+ self.reportAssertionsLocked()
+ self.reportSkippedSectionsLocked()
+ self.completeReportLocked()
+}
+func (self *statistics) reportAssertionsLocked() {
+ self.decideColorLocked()
+ self.out.Print("\n%d total %s", self.total, plural("assertion", self.total))
+}
+func (self *statistics) decideColorLocked() {
+ if self.failing && !self.erroring {
+ fmt.Print(yellowColor)
+ } else if self.erroring {
+ fmt.Print(redColor)
+ } else {
+ fmt.Print(greenColor)
+ }
+}
+func (self *statistics) reportSkippedSectionsLocked() {
+ if self.skipped > 0 {
+ fmt.Print(yellowColor)
+ self.out.Print(" (one or more sections skipped)")
+ }
+}
+func (self *statistics) completeReportLocked() {
+ fmt.Print(resetColor)
+ self.out.Print("\n")
+ self.out.Print("\n")
+}
+
+func (self *statistics) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewStatisticsReporter(out *Printer) *statistics {
+ self := statistics{}
+ self.out = out
+ return &self
+}
+
+type statistics struct {
+ sync.Mutex
+
+ out *Printer
+ total int
+ failing bool
+ erroring bool
+ skipped int
+ suppressed bool
+}
+
+func plural(word string, count int) string {
+ if count == 1 {
+ return word
+ }
+ return word + "s"
+}
diff --git a/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/story.go b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/story.go
new file mode 100644
index 00000000..9e73c971
--- /dev/null
+++ b/backend/vendor/github.com/smartystreets/goconvey/convey/reporting/story.go
@@ -0,0 +1,73 @@
+// TODO: in order for this reporter to be completely honest
+// we need to retrofit to be more like the json reporter such that:
+// 1. it maintains ScopeResult collections, which count assertions
+// 2. it reports only after EndStory(), so that all tick marks
+// are placed near the appropriate title.
+// 3. Under unit test
+
+package reporting
+
+import (
+ "fmt"
+ "strings"
+)
+
+type story struct {
+ out *Printer
+ titlesById map[string]string
+ currentKey []string
+}
+
+func (self *story) BeginStory(story *StoryReport) {}
+
+func (self *story) Enter(scope *ScopeReport) {
+ self.out.Indent()
+
+ self.currentKey = append(self.currentKey, scope.Title)
+ ID := strings.Join(self.currentKey, "|")
+
+ if _, found := self.titlesById[ID]; !found {
+ self.out.Println("")
+ self.out.Print(scope.Title)
+ self.out.Insert(" ")
+ self.titlesById[ID] = scope.Title
+ }
+}
+
+func (self *story) Report(report *AssertionResult) {
+ if report.Error != nil {
+ fmt.Print(redColor)
+ self.out.Insert(error_)
+ } else if report.Failure != "" {
+ fmt.Print(yellowColor)
+ self.out.Insert(failure)
+ } else if report.Skipped {
+ fmt.Print(yellowColor)
+ self.out.Insert(skip)
+ } else {
+ fmt.Print(greenColor)
+ self.out.Insert(success)
+ }
+ fmt.Print(resetColor)
+}
+
+func (self *story) Exit() {
+ self.out.Dedent()
+ self.currentKey = self.currentKey[:len(self.currentKey)-1]
+}
+
+func (self *story) EndStory() {
+ self.titlesById = make(map[string]string)
+ self.out.Println("\n")
+}
+
+func (self *story) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewStoryReporter(out *Printer) *story {
+ self := new(story)
+ self.out = out
+ self.titlesById = make(map[string]string)
+ return self
+}
diff --git a/backend/vendor/github.com/ugorji/go/codec/xml.go b/backend/vendor/github.com/ugorji/go/codec/xml.go
deleted file mode 100644
index 19fc36ca..00000000
--- a/backend/vendor/github.com/ugorji/go/codec/xml.go
+++ /dev/null
@@ -1,508 +0,0 @@
-// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
-// Use of this source code is governed by a MIT license found in the LICENSE file.
-
-// +build ignore
-
-package codec
-
-import "reflect"
-
-/*
-
-A strict Non-validating namespace-aware XML 1.0 parser and (en|de)coder.
-
-We are attempting this due to perceived issues with encoding/xml:
- - Complicated. It tried to do too much, and is not as simple to use as json.
- - Due to over-engineering, reflection is over-used AND performance suffers:
- java is 6X faster:http://fabsk.eu/blog/category/informatique/dev/golang/
- even PYTHON performs better: http://outgoing.typepad.com/outgoing/2014/07/exploring-golang.html
-
-codec framework will offer the following benefits
- - VASTLY improved performance (when using reflection-mode or codecgen)
- - simplicity and consistency: with the rest of the supported formats
- - all other benefits of codec framework (streaming, codegeneration, etc)
-
-codec is not a drop-in replacement for encoding/xml.
-It is a replacement, based on the simplicity and performance of codec.
-Look at it like JAXB for Go.
-
-Challenges:
- - Need to output XML preamble, with all namespaces at the right location in the output.
- - Each "end" block is dynamic, so we need to maintain a context-aware stack
- - How to decide when to use an attribute VS an element
- - How to handle chardata, attr, comment EXPLICITLY.
- - Should it output fragments?
- e.g. encoding a bool should just output true OR false, which is not well-formed XML.
-
-Extend the struct tag. See representative example:
- type X struct {
- ID uint8 `codec:"http://ugorji.net/x-namespace xid id,omitempty,toarray,attr,cdata"`
- // format: [namespace-uri ][namespace-prefix ]local-name, ...
- }
-
-Based on this, we encode
- - fields as elements, BUT
- encode as attributes if struct tag contains ",attr" and is a scalar (bool, number or string)
- - text as entity-escaped text, BUT encode as CDATA if struct tag contains ",cdata".
-
-To handle namespaces:
- - XMLHandle is denoted as being namespace-aware.
- Consequently, we WILL use the ns:name pair to encode and decode if defined, else use the plain name.
- - *Encoder and *Decoder know whether the Handle "prefers" namespaces.
- - add *Encoder.getEncName(*structFieldInfo).
- No one calls *structFieldInfo.indexForEncName directly anymore
- - OR better yet: indexForEncName is namespace-aware, and helper.go is all namespace-aware
- indexForEncName takes a parameter of the form namespace:local-name OR local-name
- - add *Decoder.getStructFieldInfo(encName string) // encName here is either like abc, or h1:nsabc
- by being a method on *Decoder, or maybe a method on the Handle itself.
- No one accesses .encName anymore
- - let encode.go and decode.go use these (for consistency)
- - only problem exists for gen.go, where we create a big switch on encName.
- Now, we also have to add a switch on strings.endsWith(kName, encNsName)
- - gen.go will need to have many more methods, and then double-on the 2 switch loops like:
- switch k {
- case "abc" : x.abc()
- case "def" : x.def()
- default {
- switch {
- case !nsAware: panic(...)
- case strings.endsWith(":abc"): x.abc()
- case strings.endsWith(":def"): x.def()
- default: panic(...)
- }
- }
- }
-
-The structure below accommodates this:
-
- type typeInfo struct {
- sfi []*structFieldInfo // sorted by encName
- sfins // sorted by namespace
- sfia // sorted, to have those with attributes at the top. Needed to write XML appropriately.
- sfip // unsorted
- }
- type structFieldInfo struct {
- encName
- nsEncName
- ns string
- attr bool
- cdata bool
- }
-
-indexForEncName is now an internal helper function that takes a sorted array
-(one of ti.sfins or ti.sfi). It is only used by *Encoder.getStructFieldInfo(...)
-
-There will be a separate parser from the builder.
-The parser will have a next() xmlToken method. It has lookahead support,
-so you can pop multiple tokens, make a determination, and push them back in the order popped.
-This will be needed to determine whether we are "nakedly" decoding a container or not.
-The stack will be implemented using a slice and push/pop happens at the [0] element.
-
-xmlToken has fields:
- - type uint8: 0 | ElementStart | ElementEnd | AttrKey | AttrVal | Text
- - value string
- - ns string
-
-SEE: http://www.xml.com/pub/a/98/10/guide0.html?page=3#ENTDECL
-
-The following are skipped when parsing:
- - External Entities (from external file)
- - Notation Declaration e.g. <!NOTATION name SYSTEM "uri">
- - Entity Declarations & References
- - XML Declaration (assume UTF-8)
- - XML Directive i.e. <! ... >
- - Other Declarations: Notation, etc.
- - Comment
- - Processing Instruction
- - schema / DTD for validation:
- We are not a VALIDATING parser. Validation is done elsewhere.
- However, some parts of the DTD internal subset are used (SEE BELOW).
- For Attribute List Declarations e.g. <!ATTLIST ... >
-
- We considered using the ATTLIST to get "default" value, but not to validate the contents. (VETOED)
-
-The following XML features are supported
- - Namespace
- - Element
- - Attribute
- - cdata
- - Unicode escape
-
-The following DTD features (when part of an internal sub-set) are supported:
- - Internal Entities e.g. <!ENTITY name "value">
- AND entities for the set: [<>&"']
- - Parameter entities e.g. <!ENTITY % name "value">
-
-
-At decode time, a structure containing the following is kept
- - namespace mapping
- - default attribute values
- - all internal entities (<>&"' and others written in the document)
-
-When decode starts, it parses XML namespace declarations and creates a map in the
-xmlDecDriver. While parsing, that map continuously gets updated.
-The only problem happens when a namespace declaration happens on the node that it defines.
-e.g. <n:x xmlns:n="uri" />
-To handle this, each Element must be fully parsed at a time,
-even if it amounts to multiple tokens which are returned one at a time on request.
-
-xmlns is a special attribute name.
- - It is used to define namespaces, including the default
- - It is never returned as an AttrKey or AttrVal.
- *We may decide later to allow user to use it e.g. you want to parse the xmlns mappings into a field.*
-
-Number, bool, null, mapKey, etc can all be decoded from any xmlToken.
-This accommodates map[int]string for example.
-
-It should be possible to create a schema from the types,
-or vice versa (generate types from schema with appropriate tags).
-This is however out-of-scope from this parsing project.
-
-We should write all namespace information at the first point that it is referenced in the tree,
-and use the mapping for all child nodes and attributes. This means that state is maintained
-at a point in the tree. This also means that calls to Decode or MustDecode will reset some state.
-
-When decoding, it is important to keep track of entity references and default attribute values.
-It seems these can only be stored in the DTD components. We should honor them when decoding.
-
-Configuration for XMLHandle will look like this:
-
- XMLHandle
- DefaultNS string
- // Encoding:
- NS map[string]string // ns URI to key, used for encoding
- // Decoding: in case ENTITY declared in external schema or dtd, store info needed here
- Entities map[string]string // map of entity rep to character
-
-
-During encode, if a namespace mapping is not defined for a namespace found on a struct,
-then we create a mapping for it using nsN (where N is 1..1000000, and doesn't conflict
-with any other namespace mapping).
-
-Note that different fields in a struct can have different namespaces.
-However, all fields will default to the namespace on the _struct field (if defined).
-
-An XML document is a name, a map of attributes and a list of children.
-Consequently, we cannot "DecodeNaked" into a map[string]interface{} (for example).
-We have to "DecodeNaked" into something that resembles XML data.
-
-To support DecodeNaked (decode into nil interface{}), we have to define some "supporting" types:
- type Name struct { // Preferred. Less allocations due to conversions.
- Local string
- Space string
- }
- type Element struct {
- Name Name
- Attrs map[Name]string
- Children []interface{} // each child is either *Element or string
- }
-Only two "supporting" types are exposed for XML: Name and Element.
-
-// ------------------
-
-We considered 'type Name string' where Name is like "Space Local" (space-separated).
-We decided against it, because each creation of a name would lead to
-double allocation (first convert []byte to string, then concatenate them into a string).
-The benefit is that it is faster to read Attrs from a map. But given that Element is a value
-object, we want to eschew methods and have public exposed variables.
-
-We also considered the following, where xml types were not value objects, and we used
-intelligent accessor methods to extract information and for performance.
-*** WE DECIDED AGAINST THIS. ***
- type Attr struct {
- Name Name
- Value string
- }
- // Element is a ValueObject: There are no accessor methods.
- // Make element self-contained.
- type Element struct {
- Name Name
- attrsMap map[string]string // where key is "Space Local"
- attrs []Attr
- childrenT []string
- childrenE []Element
- childrenI []int // each child is an index into T or E.
- }
- func (x *Element) child(i) interface{} // returns string or *Element
-
-// ------------------
-
-Per XML spec and our default handling, white space is always treated as
-insignificant between elements, except in a text node. The xml:space='preserve'
-attribute is ignored.
-
-**Note: there is no xml: namespace. The xml: attributes were defined before namespaces.**
-**So treat them as just "directives" that should be interpreted to mean something**.
-
-On encoding, we support indenting aka prettifying markup in the same way we support it for json.
-
-A document or element can only be encoded/decoded from/to a struct. In this mode:
- - struct name maps to element name (or tag-info from _struct field)
- - fields are mapped to child elements or attributes
-
-A map is either encoded as attributes on current element, or as a set of child elements.
-Maps are encoded as attributes iff their keys and values are primitives (number, bool, string).
-
-A list is encoded as a set of child elements.
-
-Primitives (number, bool, string) are encoded as an element, attribute or text
-depending on the context.
-
-Extensions must encode themselves as a text string.
-
-Encoding is tough, specifically when encoding mappings, because we need to encode
-as either attribute or element. To do this, we need to default to encoding as attributes,
-and then let Encoder inform the Handle when to start encoding as nodes.
-i.e. Encoder does something like:
-
- h.EncodeMapStart()
- h.Encode(), h.Encode(), ...
- h.EncodeMapNotAttrSignal() // this is not a bool, because it's a signal
- h.Encode(), h.Encode(), ...
- h.EncodeEnd()
-
-Only XMLHandle understands this, and will set itself to start encoding as elements.
-
-This support extends to maps. For example, if a struct field is a map, and it has
-the struct tag signifying it should be attr, then all its fields are encoded as attributes.
-e.g.
-
- type X struct {
- M map[string]int `codec:"m,attr"` // encode each entry as an attribute named after its key
- }
-
-Question:
- - if encoding a map, what if map keys have spaces in them???
- Then they cannot be attributes or child elements. Error.
-
-Options to consider adding later:
- - For attribute values, normalize by trimming beginning and ending white space,
- and converting every white space sequence to a single space.
- - ATTLIST restrictions are enforced.
- e.g. default value of xml:space, skipping xml:XYZ style attributes, etc.
- - Consider supporting NON-STRICT mode (e.g. to handle HTML parsing).
- Some elements e.g. br, hr, etc. need not close and should be auto-closed
- ... (see http://www.w3.org/TR/html4/loose.dtd)
- An expansive set of entities is pre-defined.
- - Have easy way to create a HTML parser:
- add a HTML() method to XMLHandle, that will set Strict=false, specify AutoClose,
- and add HTML Entities to the list.
- - Support validating element/attribute XMLName before writing it.
- Keep this behind a flag, which is set to false by default (for performance).
- type XMLHandle struct {
- CheckName bool
- }
-
-Misc:
-
-ROADMAP (1 week):
- - build encoder (1 day)
- - build decoder (based off xmlParser) (1 day)
- - implement xmlParser (2 days).
- Look at encoding/xml for inspiration.
- - integrate and TEST (1 day)
- - write article and post it (1 day)
-
-// ---------- MORE NOTES FROM 2017-11-30 ------------
-
-when parsing
-- parse the attributes first
-- then parse the nodes
-
-basically:
-- if encoding a field: we use the field name for the wrapper
-- if encoding a non-field, then just use the element type name
-
-   map[string]string ==> <map><key>...</key><val>...</val></map> or
-                         <map key="...">...</map> OR
-                         <map><abc>val1</abc><def>val2</def>...</map> <- PREFERRED
-   []string  ==> <string>v1</string><string>v2</string>...
-   string v1 ==> <string>v1</string>
-   bool true ==> <bool>true</bool>
-   float 1.0 ==> <float>1.0</float>
-   ...
-
-   F1 map[string]string ==> <F1><key>abc</key><val>val</val>...</F1> OR
-                            <F1 key="abc">val</F1>... OR
-                            <F1><abc>val</abc>...</F1> <- PREFERRED
-   F2 []string ==> <F2><string>v1</string><string>v2</string>...</F2>
-   F3 bool ==> <F3>true</F3>
-   ...
-
-- a scalar is encoded as:
-    (value) of type T ==> <T>(value)</T>
-    (value) of field F ==> <F>(value)</F>
-- A kv-pair is encoded as:
-    (key,value) ==> <key>(value)</key> OR